From cb655a34c9fc658c9e485bb6fb5404f9ae8adaf7 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Mon, 22 Dec 2025 16:50:56 +0000
Subject: [PATCH 01/11] Initial plan
From 352cf29f1c0d7390d3dd9a7d193e5e2ecc85463c Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Mon, 22 Dec 2025 16:55:58 +0000
Subject: [PATCH 02/11] Initial plan for upload-asset standardization
Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com>
---
.github/workflows/audit-workflows.lock.yml | 4 ++--
.github/workflows/copilot-pr-nlp-analysis.lock.yml | 4 ++--
.github/workflows/copilot-session-insights.lock.yml | 4 ++--
.github/workflows/daily-copilot-token-report.lock.yml | 4 ++--
.github/workflows/daily-file-diet.lock.yml | 4 ++--
.github/workflows/daily-firewall-report.lock.yml | 4 ++--
.github/workflows/daily-issues-report.lock.yml | 4 ++--
.github/workflows/daily-multi-device-docs-tester.lock.yml | 4 ++--
.github/workflows/daily-news.lock.yml | 4 ++--
.github/workflows/daily-performance-summary.lock.yml | 4 ++--
.github/workflows/daily-repo-chronicle.lock.yml | 4 ++--
.github/workflows/deep-report.lock.yml | 4 ++--
.github/workflows/docs-noob-tester.lock.yml | 4 ++--
.github/workflows/github-mcp-structural-analysis.lock.yml | 4 ++--
.github/workflows/intelligence.lock.yml | 4 ++--
.github/workflows/org-health-report.lock.yml | 4 ++--
.github/workflows/poem-bot.lock.yml | 4 ++--
.github/workflows/portfolio-analyst.lock.yml | 4 ++--
.github/workflows/python-data-charts.lock.yml | 4 ++--
.github/workflows/stale-repo-identifier.lock.yml | 4 ++--
.github/workflows/technical-doc-writer.lock.yml | 4 ++--
.github/workflows/unbloat-docs.lock.yml | 4 ++--
.github/workflows/weekly-issue-summary.lock.yml | 4 ++--
23 files changed, 46 insertions(+), 46 deletions(-)
diff --git a/.github/workflows/audit-workflows.lock.yml b/.github/workflows/audit-workflows.lock.yml
index ab4dfd105fa..f0ea67fbacf 100644
--- a/.github/workflows/audit-workflows.lock.yml
+++ b/.github/workflows/audit-workflows.lock.yml
@@ -7662,7 +7662,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -7703,7 +7703,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/copilot-pr-nlp-analysis.lock.yml b/.github/workflows/copilot-pr-nlp-analysis.lock.yml
index dd098dfc428..9cde9595a90 100644
--- a/.github/workflows/copilot-pr-nlp-analysis.lock.yml
+++ b/.github/workflows/copilot-pr-nlp-analysis.lock.yml
@@ -8555,7 +8555,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8596,7 +8596,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/copilot-session-insights.lock.yml b/.github/workflows/copilot-session-insights.lock.yml
index e68f89fa1b3..4ff3186d91d 100644
--- a/.github/workflows/copilot-session-insights.lock.yml
+++ b/.github/workflows/copilot-session-insights.lock.yml
@@ -8647,7 +8647,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8688,7 +8688,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/daily-copilot-token-report.lock.yml b/.github/workflows/daily-copilot-token-report.lock.yml
index ec37993d73a..47862800598 100644
--- a/.github/workflows/daily-copilot-token-report.lock.yml
+++ b/.github/workflows/daily-copilot-token-report.lock.yml
@@ -8645,7 +8645,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8686,7 +8686,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/daily-file-diet.lock.yml b/.github/workflows/daily-file-diet.lock.yml
index 6c454896853..240f78bb809 100644
--- a/.github/workflows/daily-file-diet.lock.yml
+++ b/.github/workflows/daily-file-diet.lock.yml
@@ -8811,7 +8811,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8852,7 +8852,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/daily-firewall-report.lock.yml b/.github/workflows/daily-firewall-report.lock.yml
index c7864c908d1..5ca4d84ac14 100644
--- a/.github/workflows/daily-firewall-report.lock.yml
+++ b/.github/workflows/daily-firewall-report.lock.yml
@@ -8328,7 +8328,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8369,7 +8369,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/daily-issues-report.lock.yml b/.github/workflows/daily-issues-report.lock.yml
index c9cef0bf9d0..31014292aa7 100644
--- a/.github/workflows/daily-issues-report.lock.yml
+++ b/.github/workflows/daily-issues-report.lock.yml
@@ -9202,7 +9202,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -9243,7 +9243,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/daily-multi-device-docs-tester.lock.yml b/.github/workflows/daily-multi-device-docs-tester.lock.yml
index f1c8bb878e7..c09b1ee3554 100644
--- a/.github/workflows/daily-multi-device-docs-tester.lock.yml
+++ b/.github/workflows/daily-multi-device-docs-tester.lock.yml
@@ -7262,7 +7262,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -7303,7 +7303,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/daily-news.lock.yml b/.github/workflows/daily-news.lock.yml
index bec0e4949a9..d685321a271 100644
--- a/.github/workflows/daily-news.lock.yml
+++ b/.github/workflows/daily-news.lock.yml
@@ -8450,7 +8450,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8491,7 +8491,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/daily-performance-summary.lock.yml b/.github/workflows/daily-performance-summary.lock.yml
index 21ba7f69c9e..eb18403b7a6 100644
--- a/.github/workflows/daily-performance-summary.lock.yml
+++ b/.github/workflows/daily-performance-summary.lock.yml
@@ -10273,7 +10273,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -10314,7 +10314,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/daily-repo-chronicle.lock.yml b/.github/workflows/daily-repo-chronicle.lock.yml
index f5175eec982..e34aee6917a 100644
--- a/.github/workflows/daily-repo-chronicle.lock.yml
+++ b/.github/workflows/daily-repo-chronicle.lock.yml
@@ -8371,7 +8371,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8412,7 +8412,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/deep-report.lock.yml b/.github/workflows/deep-report.lock.yml
index 0855b02f3a2..429babad867 100644
--- a/.github/workflows/deep-report.lock.yml
+++ b/.github/workflows/deep-report.lock.yml
@@ -8370,7 +8370,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8411,7 +8411,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/docs-noob-tester.lock.yml b/.github/workflows/docs-noob-tester.lock.yml
index 174429b9005..b7d5d6e946e 100644
--- a/.github/workflows/docs-noob-tester.lock.yml
+++ b/.github/workflows/docs-noob-tester.lock.yml
@@ -7838,7 +7838,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -7879,7 +7879,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/github-mcp-structural-analysis.lock.yml b/.github/workflows/github-mcp-structural-analysis.lock.yml
index 774b546cf29..bb4226e7f20 100644
--- a/.github/workflows/github-mcp-structural-analysis.lock.yml
+++ b/.github/workflows/github-mcp-structural-analysis.lock.yml
@@ -8000,7 +8000,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8041,7 +8041,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/intelligence.lock.yml b/.github/workflows/intelligence.lock.yml
index 02434d8c259..7c1795158a8 100644
--- a/.github/workflows/intelligence.lock.yml
+++ b/.github/workflows/intelligence.lock.yml
@@ -8801,7 +8801,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8842,7 +8842,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/org-health-report.lock.yml b/.github/workflows/org-health-report.lock.yml
index 4851b14299c..6f8a5b19c6c 100644
--- a/.github/workflows/org-health-report.lock.yml
+++ b/.github/workflows/org-health-report.lock.yml
@@ -8481,7 +8481,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8522,7 +8522,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/poem-bot.lock.yml b/.github/workflows/poem-bot.lock.yml
index 70f2d97876b..e79bd0dac7b 100644
--- a/.github/workflows/poem-bot.lock.yml
+++ b/.github/workflows/poem-bot.lock.yml
@@ -13337,7 +13337,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -13378,7 +13378,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/portfolio-analyst.lock.yml b/.github/workflows/portfolio-analyst.lock.yml
index 8aae29dfed5..9d71bd8dd56 100644
--- a/.github/workflows/portfolio-analyst.lock.yml
+++ b/.github/workflows/portfolio-analyst.lock.yml
@@ -8476,7 +8476,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8517,7 +8517,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/python-data-charts.lock.yml b/.github/workflows/python-data-charts.lock.yml
index da670a0f069..bf5e583f1a7 100644
--- a/.github/workflows/python-data-charts.lock.yml
+++ b/.github/workflows/python-data-charts.lock.yml
@@ -8773,7 +8773,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8814,7 +8814,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/stale-repo-identifier.lock.yml b/.github/workflows/stale-repo-identifier.lock.yml
index cab26c7974c..3ca0d732add 100644
--- a/.github/workflows/stale-repo-identifier.lock.yml
+++ b/.github/workflows/stale-repo-identifier.lock.yml
@@ -8283,7 +8283,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8324,7 +8324,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/technical-doc-writer.lock.yml b/.github/workflows/technical-doc-writer.lock.yml
index 7bbfc732b86..7717aae3152 100644
--- a/.github/workflows/technical-doc-writer.lock.yml
+++ b/.github/workflows/technical-doc-writer.lock.yml
@@ -8805,7 +8805,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8846,7 +8846,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/unbloat-docs.lock.yml b/.github/workflows/unbloat-docs.lock.yml
index 6068fce356f..1f51305800c 100644
--- a/.github/workflows/unbloat-docs.lock.yml
+++ b/.github/workflows/unbloat-docs.lock.yml
@@ -9012,7 +9012,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -9053,7 +9053,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/weekly-issue-summary.lock.yml b/.github/workflows/weekly-issue-summary.lock.yml
index debca14f01f..4037ead1aa7 100644
--- a/.github/workflows/weekly-issue-summary.lock.yml
+++ b/.github/workflows/weekly-issue-summary.lock.yml
@@ -8279,7 +8279,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8320,7 +8320,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of uploadAssetItems) {
+ for (const asset of allUploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
From 6abd7a5d9b6f3ff9cd97184ce4166c57654996d1 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Mon, 22 Dec 2025 17:07:33 +0000
Subject: [PATCH 03/11] Update upload-asset references: JS, Go code, schema,
and codemod
Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com>
---
.github/workflows/ai-moderator.lock.yml | 2 +-
.../copilot-pr-nlp-analysis.lock.yml | 191 +----------------
.github/workflows/daily-code-metrics.lock.yml | 4 +-
.github/workflows/daily-file-diet.lock.yml | 193 +-----------------
.github/workflows/dev-hawk.lock.yml | 2 +-
.github/workflows/firewall-escape.lock.yml | 2 +-
.../github-mcp-structural-analysis.lock.yml | 191 +----------------
.../workflows/go-pattern-detector.lock.yml | 2 +-
.github/workflows/intelligence.lock.yml | 191 +----------------
.github/workflows/issue-classifier.lock.yml | 2 +-
.github/workflows/issue-monster.lock.yml | 2 +-
.github/workflows/mcp-inspector.lock.yml | 4 +-
.../workflows/notion-issue-summary.lock.yml | 2 +-
.../prompt-clustering-analysis.lock.yml | 6 +-
.github/workflows/release.lock.yml | 14 +-
.github/workflows/safe-output-health.lock.yml | 2 +-
.../workflows/slide-deck-maintainer.lock.yml | 2 +-
.../smoke-copilot-playwright.lock.yml | 2 +-
.github/workflows/smoke-detector.lock.yml | 2 +-
.../workflows/static-analysis-report.lock.yml | 2 +-
.github/workflows/super-linter.lock.yml | 8 +-
.github/workflows/tidy.lock.yml | 4 +-
pkg/cli/fix_codemods.go | 99 +++++++++
pkg/parser/schemas/main_workflow_schema.json | 2 +-
.../checkout_persist_credentials_test.go | 2 +-
pkg/workflow/compiler_types.go | 2 +-
pkg/workflow/imports.go | 2 +-
pkg/workflow/js/upload_assets.cjs | 13 +-
pkg/workflow/js/upload_assets.test.cjs | 19 ++
pkg/workflow/publish_assets.go | 8 +-
pkg/workflow/publish_assets_test.go | 4 +-
pkg/workflow/safe_outputs_config.go | 2 +-
pkg/workflow/upload_assets_config_test.go | 4 +-
33 files changed, 192 insertions(+), 795 deletions(-)
diff --git a/.github/workflows/ai-moderator.lock.yml b/.github/workflows/ai-moderator.lock.yml
index c46dc6c72a2..cc1e3a190ba 100644
--- a/.github/workflows/ai-moderator.lock.yml
+++ b/.github/workflows/ai-moderator.lock.yml
@@ -5800,7 +5800,7 @@ jobs:
- name: Check if actor is external user or GitHub Action bot
id: check_actor
if: ${{ github.event_name != 'workflow_dispatch' }}
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
with:
script: |-
const actor = context.actor;
diff --git a/.github/workflows/copilot-pr-nlp-analysis.lock.yml b/.github/workflows/copilot-pr-nlp-analysis.lock.yml
index 9cde9595a90..fd6766dea38 100644
--- a/.github/workflows/copilot-pr-nlp-analysis.lock.yml
+++ b/.github/workflows/copilot-pr-nlp-analysis.lock.yml
@@ -147,9 +147,6 @@ jobs:
concurrency:
group: "gh-aw-copilot-${{ github.workflow }}"
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs
GH_AW_SAFE_OUTPUTS: /tmp/gh-aw/safeoutputs/outputs.jsonl
GH_AW_SAFE_OUTPUTS_CONFIG_PATH: /tmp/gh-aw/safeoutputs/config.json
@@ -177,7 +174,7 @@ jobs:
run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n"
- if: always()
name: Upload generated charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: data-charts
@@ -185,7 +182,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source files and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: python-source-and-data
@@ -351,7 +348,7 @@ jobs:
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
cat > /tmp/gh-aw/safeoutputs/config.json << 'EOF'
- {"create_discussion":{"max":1},"missing_tool":{"max":0},"noop":{"max":1},"upload_asset":{"max":0}}
+ {"create_discussion":{"max":1},"missing_tool":{"max":0},"noop":{"max":1}}
EOF
cat > /tmp/gh-aw/safeoutputs/tools.json << 'EOF'
[
@@ -381,23 +378,6 @@ jobs:
},
"name": "create_discussion"
},
- {
- "description": "Upload a file as a URL-addressable asset that can be referenced in issues, PRs, or comments. The file is stored on an orphaned git branch and returns a permanent URL. Use this for images, diagrams, or other files that need to be embedded in GitHub content. CONSTRAINTS: Maximum file size: 10240KB. Allowed file extensions: [.png .jpg .jpeg].",
- "inputSchema": {
- "additionalProperties": false,
- "properties": {
- "path": {
- "description": "Absolute file path to upload (e.g., '/tmp/chart.png'). Must be under the workspace or /tmp directory. By default, only image files (.png, .jpg, .jpeg) are allowed; other file types require workflow configuration.",
- "type": "string"
- }
- },
- "required": [
- "path"
- ],
- "type": "object"
- },
- "name": "upload_asset"
- },
{
"description": "Report that a tool or capability needed to complete the task is not available. Use this when you cannot accomplish what was requested because the required functionality is missing or access is restricted.",
"inputSchema": {
@@ -503,15 +483,6 @@ jobs:
"maxLength": 65000
}
}
- },
- "upload_asset": {
- "defaultMax": 10,
- "fields": {
- "path": {
- "required": true,
- "type": "string"
- }
- }
}
}
EOF
@@ -1852,9 +1823,6 @@ jobs:
env:
GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }}
- GH_AW_ASSETS_BRANCH: ${{ env.GH_AW_ASSETS_BRANCH }}
- GH_AW_ASSETS_MAX_SIZE_KB: ${{ env.GH_AW_ASSETS_MAX_SIZE_KB }}
- GH_AW_ASSETS_ALLOWED_EXTS: ${{ env.GH_AW_ASSETS_ALLOWED_EXTS }}
run: |
mkdir -p /tmp/gh-aw/mcp-config
mkdir -p /home/runner/.copilot
@@ -2934,7 +2902,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_discussion, missing_tool, noop, upload_assets
+ **Available tools**: create_discussion, missing_tool, noop
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -3231,9 +3199,6 @@ jobs:
env:
COPILOT_AGENT_RUNNER_TYPE: STANDALONE
COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }}
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_MCP_CONFIG: /home/runner/.copilot/mcp-config.json
GH_AW_MODEL_AGENT_COPILOT: ${{ vars.GH_AW_MODEL_AGENT_COPILOT || '' }}
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
@@ -6271,13 +6236,6 @@ jobs:
with:
name: cache-memory
path: /tmp/gh-aw/cache-memory
- - name: Upload safe outputs assets
- if: always()
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
- with:
- name: safe-outputs-assets
- path: /tmp/gh-aw/safeoutputs/assets/
- if-no-files-found: ignore
- name: Validate agent logs for errors
if: always()
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
@@ -7287,13 +7245,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_WORKFLOW_ID: "copilot-pr-nlp-analysis"
GH_AW_WORKFLOW_NAME: "Copilot PR Conversation NLP Analysis"
@@ -8477,142 +8432,6 @@ jobs:
core.info(`Successfully created ${createdDiscussions.length} discussion(s)`);
}
(async () => { await main(); })();
- - name: Upload Assets
- id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
- env:
- GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
- with:
- github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
- script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
- const fs = require("fs");
- const path = require("path");
- const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
- function normalizeBranchName(branchName) {
- if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
- return branchName;
- }
- let normalized = branchName.replace(/[^a-zA-Z0-9\-_/.]+/g, "-");
- normalized = normalized.replace(/-+/g, "-");
- normalized = normalized.replace(/^-+|-+$/g, "");
- if (normalized.length > 128) {
- normalized = normalized.substring(0, 128);
- }
- normalized = normalized.replace(/-+$/, "");
- normalized = normalized.toLowerCase();
- return normalized;
- }
- async function main() {
- const isStaged = process.env.GH_AW_SAFE_OUTPUTS_STAGED === "true";
- const branchName = process.env.GH_AW_ASSETS_BRANCH;
- if (!branchName || typeof branchName !== "string") {
- core.setFailed("GH_AW_ASSETS_BRANCH environment variable is required but not set");
- return;
- }
- const normalizedBranchName = normalizeBranchName(branchName);
- core.info(`Using assets branch: ${normalizedBranchName}`);
- const result = loadAgentOutput();
- if (!result.success) {
- core.setOutput("upload_count", "0");
- core.setOutput("branch_name", normalizedBranchName);
- return;
- }
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
- if (allUploadItems.length === 0) {
- core.info("No upload-asset items found in agent output");
- core.setOutput("upload_count", "0");
- core.setOutput("branch_name", normalizedBranchName);
- return;
- }
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
- let uploadCount = 0;
- let hasChanges = false;
- try {
- try {
- await exec.exec(`git rev-parse --verify origin/${normalizedBranchName}`);
- await exec.exec(`git checkout -B ${normalizedBranchName} origin/${normalizedBranchName}`);
- core.info(`Checked out existing branch from origin: ${normalizedBranchName}`);
- } catch (originError) {
- if (!normalizedBranchName.startsWith("assets/")) {
- core.setFailed(
- `Branch '${normalizedBranchName}' does not start with the required 'assets/' prefix. ` +
- `Orphaned branches can only be automatically created under the 'assets/' prefix. ` +
- `Please create the branch manually first, or use a branch name starting with 'assets/'.`
- );
- return;
- }
- core.info(`Creating new orphaned branch: ${normalizedBranchName}`);
- await exec.exec(`git checkout --orphan ${normalizedBranchName}`);
- await exec.exec(`git rm -rf .`);
- await exec.exec(`git clean -fdx`);
- }
- for (const asset of allUploadItems) {
- try {
- const { fileName, sha, size, targetFileName } = asset;
- if (!fileName || !sha || !targetFileName) {
- core.error(`Invalid asset entry missing required fields: ${JSON.stringify(asset)}`);
- continue;
- }
- const assetSourcePath = path.join("/tmp/gh-aw/safeoutputs/assets", fileName);
- if (!fs.existsSync(assetSourcePath)) {
- core.warning(`Asset file not found: ${assetSourcePath}`);
- continue;
- }
- const fileContent = fs.readFileSync(assetSourcePath);
- const computedSha = crypto.createHash("sha256").update(fileContent).digest("hex");
- if (computedSha !== sha) {
- core.warning(`SHA mismatch for ${fileName}: expected ${sha}, got ${computedSha}`);
- continue;
- }
- if (fs.existsSync(targetFileName)) {
- core.info(`Asset ${targetFileName} already exists, skipping`);
- continue;
- }
- fs.copyFileSync(assetSourcePath, targetFileName);
- await exec.exec(`git add "${targetFileName}"`);
- uploadCount++;
- hasChanges = true;
- core.info(`Added asset: ${targetFileName} (${size} bytes)`);
- } catch (error) {
- core.warning(`Failed to process asset ${asset.fileName}: ${error instanceof Error ? error.message : String(error)}`);
- }
- }
- if (hasChanges) {
- const commitMessage = `[skip-ci] Add ${uploadCount} asset(s)`;
- await exec.exec(`git`, [`commit`, `-m`, commitMessage]);
- if (isStaged) {
- core.summary.addRaw("## Staged Asset Publication");
- } else {
- await exec.exec(`git push origin ${normalizedBranchName}`);
- core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
- core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
- }
- for (const asset of allUploadItems) {
- if (asset.fileName && asset.sha && asset.size && asset.url) {
- core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
- }
- }
- core.summary.write();
- } else {
- core.info("No new assets to upload");
- }
- } catch (error) {
- core.setFailed(`Failed to upload assets: ${error instanceof Error ? error.message : String(error)}`);
- return;
- }
- core.setOutput("upload_count", uploadCount.toString());
- core.setOutput("branch_name", normalizedBranchName);
- }
- (async () => { await main(); })();
update_cache_memory:
needs:
diff --git a/.github/workflows/daily-code-metrics.lock.yml b/.github/workflows/daily-code-metrics.lock.yml
index 6a86afdc39a..9d245b2b8d8 100644
--- a/.github/workflows/daily-code-metrics.lock.yml
+++ b/.github/workflows/daily-code-metrics.lock.yml
@@ -170,7 +170,7 @@ jobs:
pip install --user --quiet numpy pandas matplotlib seaborn scipy
- if: always()
name: Upload charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: trending-charts
@@ -178,7 +178,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: trending-source-and-data
diff --git a/.github/workflows/daily-file-diet.lock.yml b/.github/workflows/daily-file-diet.lock.yml
index 240f78bb809..e2579ee11c2 100644
--- a/.github/workflows/daily-file-diet.lock.yml
+++ b/.github/workflows/daily-file-diet.lock.yml
@@ -149,9 +149,6 @@ jobs:
concurrency:
group: "gh-aw-copilot-${{ github.workflow }}"
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs
GH_AW_SAFE_OUTPUTS: /tmp/gh-aw/safeoutputs/outputs.jsonl
GH_AW_SAFE_OUTPUTS_CONFIG_PATH: /tmp/gh-aw/safeoutputs/config.json
@@ -189,7 +186,7 @@ jobs:
run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n"
- if: always()
name: Upload generated charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: data-charts
@@ -197,7 +194,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source files and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: python-source-and-data
@@ -379,7 +376,7 @@ jobs:
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
cat > /tmp/gh-aw/safeoutputs/config.json << 'EOF'
- {"create_issue":{"max":1},"missing_tool":{"max":0},"noop":{"max":1},"upload_asset":{"max":0}}
+ {"create_issue":{"max":1},"missing_tool":{"max":0},"noop":{"max":1}}
EOF
cat > /tmp/gh-aw/safeoutputs/tools.json << 'EOF'
[
@@ -423,23 +420,6 @@ jobs:
},
"name": "create_issue"
},
- {
- "description": "Upload a file as a URL-addressable asset that can be referenced in issues, PRs, or comments. The file is stored on an orphaned git branch and returns a permanent URL. Use this for images, diagrams, or other files that need to be embedded in GitHub content. CONSTRAINTS: Maximum file size: 10240KB. Allowed file extensions: [.png .jpg .jpeg].",
- "inputSchema": {
- "additionalProperties": false,
- "properties": {
- "path": {
- "description": "Absolute file path to upload (e.g., '/tmp/chart.png'). Must be under the workspace or /tmp directory. By default, only image files (.png, .jpg, .jpeg) are allowed; other file types require workflow configuration.",
- "type": "string"
- }
- },
- "required": [
- "path"
- ],
- "type": "object"
- },
- "name": "upload_asset"
- },
{
"description": "Report that a tool or capability needed to complete the task is not available. Use this when you cannot accomplish what was requested because the required functionality is missing or access is restricted.",
"inputSchema": {
@@ -552,15 +532,6 @@ jobs:
"maxLength": 65000
}
}
- },
- "upload_asset": {
- "defaultMax": 10,
- "fields": {
- "path": {
- "required": true,
- "type": "string"
- }
- }
}
}
EOF
@@ -1901,9 +1872,6 @@ jobs:
env:
GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }}
- GH_AW_ASSETS_BRANCH: ${{ env.GH_AW_ASSETS_BRANCH }}
- GH_AW_ASSETS_MAX_SIZE_KB: ${{ env.GH_AW_ASSETS_MAX_SIZE_KB }}
- GH_AW_ASSETS_ALLOWED_EXTS: ${{ env.GH_AW_ASSETS_ALLOWED_EXTS }}
run: |
mkdir -p /tmp/gh-aw/mcp-config
mkdir -p /home/runner/.copilot
@@ -2971,7 +2939,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_issue, missing_tool, noop, upload_assets
+ **Available tools**: create_issue, missing_tool, noop
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -3269,9 +3237,6 @@ jobs:
env:
COPILOT_AGENT_RUNNER_TYPE: STANDALONE
COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }}
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_MCP_CONFIG: /home/runner/.copilot/mcp-config.json
GH_AW_MODEL_AGENT_COPILOT: ${{ vars.GH_AW_MODEL_AGENT_COPILOT || '' }}
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
@@ -6318,13 +6283,6 @@ jobs:
with:
name: cache-memory
path: /tmp/gh-aw/cache-memory
- - name: Upload safe outputs assets
- if: always()
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
- with:
- name: safe-outputs-assets
- path: /tmp/gh-aw/safeoutputs/assets/
- if-no-files-found: ignore
- name: Validate agent logs for errors
if: always()
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
@@ -7757,13 +7715,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
issues: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_TRACKER_ID: "daily-file-diet"
GH_AW_WORKFLOW_ID: "daily-file-diet"
@@ -7793,7 +7748,7 @@ jobs:
owner: ${{ github.repository_owner }}
repositories: ${{ github.event.repository.name }}
github-api-url: ${{ github.api_url }}
- permission-contents: write
+ permission-contents: read
permission-issues: write
- name: Setup JavaScript files
id: setup_scripts
@@ -8733,142 +8688,6 @@ jobs:
(async () => {
await main();
})();
- - name: Upload Assets
- id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
- env:
- GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
- with:
- github-token: ${{ steps.app-token.outputs.token }}
- script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
- const fs = require("fs");
- const path = require("path");
- const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
- function normalizeBranchName(branchName) {
- if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
- return branchName;
- }
- let normalized = branchName.replace(/[^a-zA-Z0-9\-_/.]+/g, "-");
- normalized = normalized.replace(/-+/g, "-");
- normalized = normalized.replace(/^-+|-+$/g, "");
- if (normalized.length > 128) {
- normalized = normalized.substring(0, 128);
- }
- normalized = normalized.replace(/-+$/, "");
- normalized = normalized.toLowerCase();
- return normalized;
- }
- async function main() {
- const isStaged = process.env.GH_AW_SAFE_OUTPUTS_STAGED === "true";
- const branchName = process.env.GH_AW_ASSETS_BRANCH;
- if (!branchName || typeof branchName !== "string") {
- core.setFailed("GH_AW_ASSETS_BRANCH environment variable is required but not set");
- return;
- }
- const normalizedBranchName = normalizeBranchName(branchName);
- core.info(`Using assets branch: ${normalizedBranchName}`);
- const result = loadAgentOutput();
- if (!result.success) {
- core.setOutput("upload_count", "0");
- core.setOutput("branch_name", normalizedBranchName);
- return;
- }
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
- if (allUploadItems.length === 0) {
- core.info("No upload-asset items found in agent output");
- core.setOutput("upload_count", "0");
- core.setOutput("branch_name", normalizedBranchName);
- return;
- }
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
- let uploadCount = 0;
- let hasChanges = false;
- try {
- try {
- await exec.exec(`git rev-parse --verify origin/${normalizedBranchName}`);
- await exec.exec(`git checkout -B ${normalizedBranchName} origin/${normalizedBranchName}`);
- core.info(`Checked out existing branch from origin: ${normalizedBranchName}`);
- } catch (originError) {
- if (!normalizedBranchName.startsWith("assets/")) {
- core.setFailed(
- `Branch '${normalizedBranchName}' does not start with the required 'assets/' prefix. ` +
- `Orphaned branches can only be automatically created under the 'assets/' prefix. ` +
- `Please create the branch manually first, or use a branch name starting with 'assets/'.`
- );
- return;
- }
- core.info(`Creating new orphaned branch: ${normalizedBranchName}`);
- await exec.exec(`git checkout --orphan ${normalizedBranchName}`);
- await exec.exec(`git rm -rf .`);
- await exec.exec(`git clean -fdx`);
- }
- for (const asset of allUploadItems) {
- try {
- const { fileName, sha, size, targetFileName } = asset;
- if (!fileName || !sha || !targetFileName) {
- core.error(`Invalid asset entry missing required fields: ${JSON.stringify(asset)}`);
- continue;
- }
- const assetSourcePath = path.join("/tmp/gh-aw/safeoutputs/assets", fileName);
- if (!fs.existsSync(assetSourcePath)) {
- core.warning(`Asset file not found: ${assetSourcePath}`);
- continue;
- }
- const fileContent = fs.readFileSync(assetSourcePath);
- const computedSha = crypto.createHash("sha256").update(fileContent).digest("hex");
- if (computedSha !== sha) {
- core.warning(`SHA mismatch for ${fileName}: expected ${sha}, got ${computedSha}`);
- continue;
- }
- if (fs.existsSync(targetFileName)) {
- core.info(`Asset ${targetFileName} already exists, skipping`);
- continue;
- }
- fs.copyFileSync(assetSourcePath, targetFileName);
- await exec.exec(`git add "${targetFileName}"`);
- uploadCount++;
- hasChanges = true;
- core.info(`Added asset: ${targetFileName} (${size} bytes)`);
- } catch (error) {
- core.warning(`Failed to process asset ${asset.fileName}: ${error instanceof Error ? error.message : String(error)}`);
- }
- }
- if (hasChanges) {
- const commitMessage = `[skip-ci] Add ${uploadCount} asset(s)`;
- await exec.exec(`git`, [`commit`, `-m`, commitMessage]);
- if (isStaged) {
- core.summary.addRaw("## Staged Asset Publication");
- } else {
- await exec.exec(`git push origin ${normalizedBranchName}`);
- core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
- core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
- }
- for (const asset of allUploadItems) {
- if (asset.fileName && asset.sha && asset.size && asset.url) {
- core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
- }
- }
- core.summary.write();
- } else {
- core.info("No new assets to upload");
- }
- } catch (error) {
- core.setFailed(`Failed to upload assets: ${error instanceof Error ? error.message : String(error)}`);
- return;
- }
- core.setOutput("upload_count", uploadCount.toString());
- core.setOutput("branch_name", normalizedBranchName);
- }
- (async () => { await main(); })();
- name: Invalidate GitHub App token
if: always() && steps.app-token.outputs.token != ''
env:
diff --git a/.github/workflows/dev-hawk.lock.yml b/.github/workflows/dev-hawk.lock.yml
index 20965099166..c5057a15c84 100644
--- a/.github/workflows/dev-hawk.lock.yml
+++ b/.github/workflows/dev-hawk.lock.yml
@@ -174,7 +174,7 @@ jobs:
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
+ uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
with:
cache: true
go-version-file: go.mod
diff --git a/.github/workflows/firewall-escape.lock.yml b/.github/workflows/firewall-escape.lock.yml
index 85fe48f5989..83c3e846026 100644
--- a/.github/workflows/firewall-escape.lock.yml
+++ b/.github/workflows/firewall-escape.lock.yml
@@ -2978,7 +2978,7 @@ jobs:
steps:
- name: Create issue on test failure
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
with:
script: |
await github.rest.issues.create({
diff --git a/.github/workflows/github-mcp-structural-analysis.lock.yml b/.github/workflows/github-mcp-structural-analysis.lock.yml
index bb4226e7f20..72803a61065 100644
--- a/.github/workflows/github-mcp-structural-analysis.lock.yml
+++ b/.github/workflows/github-mcp-structural-analysis.lock.yml
@@ -147,9 +147,6 @@ jobs:
concurrency:
group: "gh-aw-claude-${{ github.workflow }}"
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs
GH_AW_SAFE_OUTPUTS: /tmp/gh-aw/safeoutputs/outputs.jsonl
GH_AW_SAFE_OUTPUTS_CONFIG_PATH: /tmp/gh-aw/safeoutputs/config.json
@@ -175,7 +172,7 @@ jobs:
run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n"
- if: always()
name: Upload generated charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: data-charts
@@ -183,7 +180,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source files and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: python-source-and-data
@@ -333,7 +330,7 @@ jobs:
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
cat > /tmp/gh-aw/safeoutputs/config.json << 'EOF'
- {"create_discussion":{"max":1},"missing_tool":{"max":0},"noop":{"max":1},"upload_asset":{"max":0}}
+ {"create_discussion":{"max":1},"missing_tool":{"max":0},"noop":{"max":1}}
EOF
cat > /tmp/gh-aw/safeoutputs/tools.json << 'EOF'
[
@@ -363,23 +360,6 @@ jobs:
},
"name": "create_discussion"
},
- {
- "description": "Upload a file as a URL-addressable asset that can be referenced in issues, PRs, or comments. The file is stored on an orphaned git branch and returns a permanent URL. Use this for images, diagrams, or other files that need to be embedded in GitHub content. CONSTRAINTS: Maximum file size: 10240KB. Allowed file extensions: [.png .jpg .jpeg].",
- "inputSchema": {
- "additionalProperties": false,
- "properties": {
- "path": {
- "description": "Absolute file path to upload (e.g., '/tmp/chart.png'). Must be under the workspace or /tmp directory. By default, only image files (.png, .jpg, .jpeg) are allowed; other file types require workflow configuration.",
- "type": "string"
- }
- },
- "required": [
- "path"
- ],
- "type": "object"
- },
- "name": "upload_asset"
- },
{
"description": "Report that a tool or capability needed to complete the task is not available. Use this when you cannot accomplish what was requested because the required functionality is missing or access is restricted.",
"inputSchema": {
@@ -485,15 +465,6 @@ jobs:
"maxLength": 65000
}
}
- },
- "upload_asset": {
- "defaultMax": 10,
- "fields": {
- "path": {
- "required": true,
- "type": "string"
- }
- }
}
}
EOF
@@ -1834,9 +1805,6 @@ jobs:
env:
GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }}
- GH_AW_ASSETS_BRANCH: ${{ env.GH_AW_ASSETS_BRANCH }}
- GH_AW_ASSETS_MAX_SIZE_KB: ${{ env.GH_AW_ASSETS_MAX_SIZE_KB }}
- GH_AW_ASSETS_ALLOWED_EXTS: ${{ env.GH_AW_ASSETS_ALLOWED_EXTS }}
run: |
mkdir -p /tmp/gh-aw/mcp-config
cat > /tmp/gh-aw/mcp-config/mcp-servers.json << EOF
@@ -2722,7 +2690,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_discussion, missing_tool, noop, upload_assets
+ **Available tools**: create_discussion, missing_tool, noop
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -3092,9 +3060,6 @@ jobs:
DISABLE_BUG_COMMAND: 1
DISABLE_ERROR_REPORTING: 1
DISABLE_TELEMETRY: 1
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_MCP_CONFIG: /tmp/gh-aw/mcp-config/mcp-servers.json
GH_AW_MODEL_AGENT_CLAUDE: ${{ vars.GH_AW_MODEL_AGENT_CLAUDE || '' }}
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
@@ -5710,13 +5675,6 @@ jobs:
with:
name: cache-memory
path: /tmp/gh-aw/cache-memory
- - name: Upload safe outputs assets
- if: always()
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
- with:
- name: safe-outputs-assets
- path: /tmp/gh-aw/safeoutputs/assets/
- if-no-files-found: ignore
- name: Validate agent logs for errors
if: always()
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
@@ -6732,13 +6690,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "claude"
GH_AW_WORKFLOW_ID: "github-mcp-structural-analysis"
GH_AW_WORKFLOW_NAME: "GitHub MCP Structural Analysis"
@@ -7922,142 +7877,6 @@ jobs:
core.info(`Successfully created ${createdDiscussions.length} discussion(s)`);
}
(async () => { await main(); })();
- - name: Upload Assets
- id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
- env:
- GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
- with:
- github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
- script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
- const fs = require("fs");
- const path = require("path");
- const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
- function normalizeBranchName(branchName) {
- if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
- return branchName;
- }
- let normalized = branchName.replace(/[^a-zA-Z0-9\-_/.]+/g, "-");
- normalized = normalized.replace(/-+/g, "-");
- normalized = normalized.replace(/^-+|-+$/g, "");
- if (normalized.length > 128) {
- normalized = normalized.substring(0, 128);
- }
- normalized = normalized.replace(/-+$/, "");
- normalized = normalized.toLowerCase();
- return normalized;
- }
- async function main() {
- const isStaged = process.env.GH_AW_SAFE_OUTPUTS_STAGED === "true";
- const branchName = process.env.GH_AW_ASSETS_BRANCH;
- if (!branchName || typeof branchName !== "string") {
- core.setFailed("GH_AW_ASSETS_BRANCH environment variable is required but not set");
- return;
- }
- const normalizedBranchName = normalizeBranchName(branchName);
- core.info(`Using assets branch: ${normalizedBranchName}`);
- const result = loadAgentOutput();
- if (!result.success) {
- core.setOutput("upload_count", "0");
- core.setOutput("branch_name", normalizedBranchName);
- return;
- }
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
- if (allUploadItems.length === 0) {
- core.info("No upload-asset items found in agent output");
- core.setOutput("upload_count", "0");
- core.setOutput("branch_name", normalizedBranchName);
- return;
- }
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
- let uploadCount = 0;
- let hasChanges = false;
- try {
- try {
- await exec.exec(`git rev-parse --verify origin/${normalizedBranchName}`);
- await exec.exec(`git checkout -B ${normalizedBranchName} origin/${normalizedBranchName}`);
- core.info(`Checked out existing branch from origin: ${normalizedBranchName}`);
- } catch (originError) {
- if (!normalizedBranchName.startsWith("assets/")) {
- core.setFailed(
- `Branch '${normalizedBranchName}' does not start with the required 'assets/' prefix. ` +
- `Orphaned branches can only be automatically created under the 'assets/' prefix. ` +
- `Please create the branch manually first, or use a branch name starting with 'assets/'.`
- );
- return;
- }
- core.info(`Creating new orphaned branch: ${normalizedBranchName}`);
- await exec.exec(`git checkout --orphan ${normalizedBranchName}`);
- await exec.exec(`git rm -rf .`);
- await exec.exec(`git clean -fdx`);
- }
- for (const asset of allUploadItems) {
- try {
- const { fileName, sha, size, targetFileName } = asset;
- if (!fileName || !sha || !targetFileName) {
- core.error(`Invalid asset entry missing required fields: ${JSON.stringify(asset)}`);
- continue;
- }
- const assetSourcePath = path.join("/tmp/gh-aw/safeoutputs/assets", fileName);
- if (!fs.existsSync(assetSourcePath)) {
- core.warning(`Asset file not found: ${assetSourcePath}`);
- continue;
- }
- const fileContent = fs.readFileSync(assetSourcePath);
- const computedSha = crypto.createHash("sha256").update(fileContent).digest("hex");
- if (computedSha !== sha) {
- core.warning(`SHA mismatch for ${fileName}: expected ${sha}, got ${computedSha}`);
- continue;
- }
- if (fs.existsSync(targetFileName)) {
- core.info(`Asset ${targetFileName} already exists, skipping`);
- continue;
- }
- fs.copyFileSync(assetSourcePath, targetFileName);
- await exec.exec(`git add "${targetFileName}"`);
- uploadCount++;
- hasChanges = true;
- core.info(`Added asset: ${targetFileName} (${size} bytes)`);
- } catch (error) {
- core.warning(`Failed to process asset ${asset.fileName}: ${error instanceof Error ? error.message : String(error)}`);
- }
- }
- if (hasChanges) {
- const commitMessage = `[skip-ci] Add ${uploadCount} asset(s)`;
- await exec.exec(`git`, [`commit`, `-m`, commitMessage]);
- if (isStaged) {
- core.summary.addRaw("## Staged Asset Publication");
- } else {
- await exec.exec(`git push origin ${normalizedBranchName}`);
- core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
- core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
- }
- for (const asset of allUploadItems) {
- if (asset.fileName && asset.sha && asset.size && asset.url) {
- core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
- }
- }
- core.summary.write();
- } else {
- core.info("No new assets to upload");
- }
- } catch (error) {
- core.setFailed(`Failed to upload assets: ${error instanceof Error ? error.message : String(error)}`);
- return;
- }
- core.setOutput("upload_count", uploadCount.toString());
- core.setOutput("branch_name", normalizedBranchName);
- }
- (async () => { await main(); })();
update_cache_memory:
needs:
diff --git a/.github/workflows/go-pattern-detector.lock.yml b/.github/workflows/go-pattern-detector.lock.yml
index 778f79869f5..d993ca381af 100644
--- a/.github/workflows/go-pattern-detector.lock.yml
+++ b/.github/workflows/go-pattern-detector.lock.yml
@@ -5380,7 +5380,7 @@ jobs:
found_patterns: ${{ steps.detect.outputs.found_patterns }}
steps:
- name: Checkout repository
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
with:
persist-credentials: false
- name: Install ast-grep
diff --git a/.github/workflows/intelligence.lock.yml b/.github/workflows/intelligence.lock.yml
index 7c1795158a8..664d8ac0dce 100644
--- a/.github/workflows/intelligence.lock.yml
+++ b/.github/workflows/intelligence.lock.yml
@@ -154,9 +154,6 @@ jobs:
concurrency:
group: "gh-aw-copilot-${{ github.workflow }}"
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs
GH_AW_SAFE_OUTPUTS: /tmp/gh-aw/safeoutputs/outputs.jsonl
GH_AW_SAFE_OUTPUTS_CONFIG_PATH: /tmp/gh-aw/safeoutputs/config.json
@@ -182,7 +179,7 @@ jobs:
run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n"
- if: always()
name: Upload generated charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: data-charts
@@ -190,7 +187,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source files and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: python-source-and-data
@@ -372,7 +369,7 @@ jobs:
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
cat > /tmp/gh-aw/safeoutputs/config.json << 'EOF'
- {"create_issue":{"max":1},"missing_tool":{"max":0},"noop":{"max":1},"upload_asset":{"max":0}}
+ {"create_issue":{"max":1},"missing_tool":{"max":0},"noop":{"max":1}}
EOF
cat > /tmp/gh-aw/safeoutputs/tools.json << 'EOF'
[
@@ -416,23 +413,6 @@ jobs:
},
"name": "create_issue"
},
- {
- "description": "Upload a file as a URL-addressable asset that can be referenced in issues, PRs, or comments. The file is stored on an orphaned git branch and returns a permanent URL. Use this for images, diagrams, or other files that need to be embedded in GitHub content. CONSTRAINTS: Maximum file size: 10240KB. Allowed file extensions: [.png .jpg .jpeg].",
- "inputSchema": {
- "additionalProperties": false,
- "properties": {
- "path": {
- "description": "Absolute file path to upload (e.g., '/tmp/chart.png'). Must be under the workspace or /tmp directory. By default, only image files (.png, .jpg, .jpeg) are allowed; other file types require workflow configuration.",
- "type": "string"
- }
- },
- "required": [
- "path"
- ],
- "type": "object"
- },
- "name": "upload_asset"
- },
{
"description": "Report that a tool or capability needed to complete the task is not available. Use this when you cannot accomplish what was requested because the required functionality is missing or access is restricted.",
"inputSchema": {
@@ -545,15 +525,6 @@ jobs:
"maxLength": 65000
}
}
- },
- "upload_asset": {
- "defaultMax": 10,
- "fields": {
- "path": {
- "required": true,
- "type": "string"
- }
- }
}
}
EOF
@@ -1894,9 +1865,6 @@ jobs:
env:
GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }}
- GH_AW_ASSETS_BRANCH: ${{ env.GH_AW_ASSETS_BRANCH }}
- GH_AW_ASSETS_MAX_SIZE_KB: ${{ env.GH_AW_ASSETS_MAX_SIZE_KB }}
- GH_AW_ASSETS_ALLOWED_EXTS: ${{ env.GH_AW_ASSETS_ALLOWED_EXTS }}
run: |
mkdir -p /tmp/gh-aw/mcp-config
mkdir -p /home/runner/.copilot
@@ -3205,7 +3173,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_issue, missing_tool, noop, upload_assets
+ **Available tools**: create_issue, missing_tool, noop
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -3501,9 +3469,6 @@ jobs:
env:
COPILOT_AGENT_RUNNER_TYPE: STANDALONE
COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }}
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_MCP_CONFIG: /home/runner/.copilot/mcp-config.json
GH_AW_MODEL_AGENT_COPILOT: ${{ vars.GH_AW_MODEL_AGENT_COPILOT || '' }}
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
@@ -6550,13 +6515,6 @@ jobs:
with:
name: cache-memory
path: /tmp/gh-aw/cache-memory
- - name: Upload safe outputs assets
- if: always()
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
- with:
- name: safe-outputs-assets
- path: /tmp/gh-aw/safeoutputs/assets/
- if-no-files-found: ignore
- name: Validate agent logs for errors
if: always()
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
@@ -7761,13 +7719,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
issues: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_WORKFLOW_ID: "intelligence"
GH_AW_WORKFLOW_NAME: "Campaign Intelligence System"
@@ -8723,142 +8678,6 @@ jobs:
(async () => {
await main();
})();
- - name: Upload Assets
- id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
- env:
- GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
- with:
- github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
- script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
- const fs = require("fs");
- const path = require("path");
- const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
- function normalizeBranchName(branchName) {
- if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
- return branchName;
- }
- let normalized = branchName.replace(/[^a-zA-Z0-9\-_/.]+/g, "-");
- normalized = normalized.replace(/-+/g, "-");
- normalized = normalized.replace(/^-+|-+$/g, "");
- if (normalized.length > 128) {
- normalized = normalized.substring(0, 128);
- }
- normalized = normalized.replace(/-+$/, "");
- normalized = normalized.toLowerCase();
- return normalized;
- }
- async function main() {
- const isStaged = process.env.GH_AW_SAFE_OUTPUTS_STAGED === "true";
- const branchName = process.env.GH_AW_ASSETS_BRANCH;
- if (!branchName || typeof branchName !== "string") {
- core.setFailed("GH_AW_ASSETS_BRANCH environment variable is required but not set");
- return;
- }
- const normalizedBranchName = normalizeBranchName(branchName);
- core.info(`Using assets branch: ${normalizedBranchName}`);
- const result = loadAgentOutput();
- if (!result.success) {
- core.setOutput("upload_count", "0");
- core.setOutput("branch_name", normalizedBranchName);
- return;
- }
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
- if (allUploadItems.length === 0) {
- core.info("No upload-asset items found in agent output");
- core.setOutput("upload_count", "0");
- core.setOutput("branch_name", normalizedBranchName);
- return;
- }
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
- let uploadCount = 0;
- let hasChanges = false;
- try {
- try {
- await exec.exec(`git rev-parse --verify origin/${normalizedBranchName}`);
- await exec.exec(`git checkout -B ${normalizedBranchName} origin/${normalizedBranchName}`);
- core.info(`Checked out existing branch from origin: ${normalizedBranchName}`);
- } catch (originError) {
- if (!normalizedBranchName.startsWith("assets/")) {
- core.setFailed(
- `Branch '${normalizedBranchName}' does not start with the required 'assets/' prefix. ` +
- `Orphaned branches can only be automatically created under the 'assets/' prefix. ` +
- `Please create the branch manually first, or use a branch name starting with 'assets/'.`
- );
- return;
- }
- core.info(`Creating new orphaned branch: ${normalizedBranchName}`);
- await exec.exec(`git checkout --orphan ${normalizedBranchName}`);
- await exec.exec(`git rm -rf .`);
- await exec.exec(`git clean -fdx`);
- }
- for (const asset of allUploadItems) {
- try {
- const { fileName, sha, size, targetFileName } = asset;
- if (!fileName || !sha || !targetFileName) {
- core.error(`Invalid asset entry missing required fields: ${JSON.stringify(asset)}`);
- continue;
- }
- const assetSourcePath = path.join("/tmp/gh-aw/safeoutputs/assets", fileName);
- if (!fs.existsSync(assetSourcePath)) {
- core.warning(`Asset file not found: ${assetSourcePath}`);
- continue;
- }
- const fileContent = fs.readFileSync(assetSourcePath);
- const computedSha = crypto.createHash("sha256").update(fileContent).digest("hex");
- if (computedSha !== sha) {
- core.warning(`SHA mismatch for ${fileName}: expected ${sha}, got ${computedSha}`);
- continue;
- }
- if (fs.existsSync(targetFileName)) {
- core.info(`Asset ${targetFileName} already exists, skipping`);
- continue;
- }
- fs.copyFileSync(assetSourcePath, targetFileName);
- await exec.exec(`git add "${targetFileName}"`);
- uploadCount++;
- hasChanges = true;
- core.info(`Added asset: ${targetFileName} (${size} bytes)`);
- } catch (error) {
- core.warning(`Failed to process asset ${asset.fileName}: ${error instanceof Error ? error.message : String(error)}`);
- }
- }
- if (hasChanges) {
- const commitMessage = `[skip-ci] Add ${uploadCount} asset(s)`;
- await exec.exec(`git`, [`commit`, `-m`, commitMessage]);
- if (isStaged) {
- core.summary.addRaw("## Staged Asset Publication");
- } else {
- await exec.exec(`git push origin ${normalizedBranchName}`);
- core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
- core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
- }
- for (const asset of allUploadItems) {
- if (asset.fileName && asset.sha && asset.size && asset.url) {
- core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
- }
- }
- core.summary.write();
- } else {
- core.info("No new assets to upload");
- }
- } catch (error) {
- core.setFailed(`Failed to upload assets: ${error instanceof Error ? error.message : String(error)}`);
- return;
- }
- core.setOutput("upload_count", uploadCount.toString());
- core.setOutput("branch_name", normalizedBranchName);
- }
- (async () => { await main(); })();
update_cache_memory:
needs:
diff --git a/.github/workflows/issue-classifier.lock.yml b/.github/workflows/issue-classifier.lock.yml
index d578b06cb3f..f86933a2ed9 100644
--- a/.github/workflows/issue-classifier.lock.yml
+++ b/.github/workflows/issue-classifier.lock.yml
@@ -2995,7 +2995,7 @@ jobs:
path: /tmp/gh-aw/aw_info.json
if-no-files-found: warn
- name: Run AI Inference
- uses: actions/ai-inference@334892bb203895caaed82ec52d23c1ed9385151e # v1
+ uses: actions/ai-inference@334892bb203895caaed82ec52d23c1ed9385151e # v2.0.4
env:
GH_AW_MCP_CONFIG: /tmp/gh-aw/mcp-config/mcp-servers.json
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
diff --git a/.github/workflows/issue-monster.lock.yml b/.github/workflows/issue-monster.lock.yml
index 96ab016ee7f..d6027623284 100644
--- a/.github/workflows/issue-monster.lock.yml
+++ b/.github/workflows/issue-monster.lock.yml
@@ -8449,7 +8449,7 @@ jobs:
steps:
- name: Search for candidate issues
id: search
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
with:
script: |
const { owner, repo } = context.repo;
diff --git a/.github/workflows/mcp-inspector.lock.yml b/.github/workflows/mcp-inspector.lock.yml
index a0145b73f0a..cfcd912f635 100644
--- a/.github/workflows/mcp-inspector.lock.yml
+++ b/.github/workflows/mcp-inspector.lock.yml
@@ -6944,7 +6944,7 @@ jobs:
find "/tmp/gh-aw/safe-jobs/" -type f -print
echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safe-jobs/agent_output.json" >> "$GITHUB_ENV"
- name: Add comment to Notion page
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
NOTION_API_TOKEN: ${{ secrets.NOTION_API_TOKEN }}
NOTION_PAGE_ID: ${{ vars.NOTION_PAGE_ID }}
@@ -7074,7 +7074,7 @@ jobs:
find "/tmp/gh-aw/safe-jobs/" -type f -print
echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safe-jobs/agent_output.json" >> "$GITHUB_ENV"
- name: Post message to Slack
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
SLACK_CHANNEL_ID: ${{ env.GH_AW_SLACK_CHANNEL_ID }}
diff --git a/.github/workflows/notion-issue-summary.lock.yml b/.github/workflows/notion-issue-summary.lock.yml
index 8f003a4ca32..ebbdfd85f67 100644
--- a/.github/workflows/notion-issue-summary.lock.yml
+++ b/.github/workflows/notion-issue-summary.lock.yml
@@ -6340,7 +6340,7 @@ jobs:
find "/tmp/gh-aw/safe-jobs/" -type f -print
echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safe-jobs/agent_output.json" >> "$GITHUB_ENV"
- name: Add comment to Notion page
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
NOTION_API_TOKEN: ${{ secrets.NOTION_API_TOKEN }}
NOTION_PAGE_ID: ${{ vars.NOTION_PAGE_ID }}
diff --git a/.github/workflows/prompt-clustering-analysis.lock.yml b/.github/workflows/prompt-clustering-analysis.lock.yml
index cc9955aefd4..2fb324a7eac 100644
--- a/.github/workflows/prompt-clustering-analysis.lock.yml
+++ b/.github/workflows/prompt-clustering-analysis.lock.yml
@@ -173,7 +173,7 @@ jobs:
- name: Set up jq utilities directory
run: "mkdir -p /tmp/gh-aw\ncat > /tmp/gh-aw/jqschema.sh << 'EOF'\n#!/usr/bin/env bash\n# jqschema.sh\njq -c '\ndef walk(f):\n . as $in |\n if type == \"object\" then\n reduce keys[] as $k ({}; . + {($k): ($in[$k] | walk(f))})\n elif type == \"array\" then\n if length == 0 then [] else [.[0] | walk(f)] end\n else\n type\n end;\nwalk(.)\n'\nEOF\nchmod +x /tmp/gh-aw/jqschema.sh"
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
+ uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
with:
cache: true
go-version-file: go.mod
@@ -198,7 +198,7 @@ jobs:
pip install --user --quiet numpy pandas matplotlib seaborn scipy
- if: always()
name: Upload charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: trending-charts
@@ -206,7 +206,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: trending-source-and-data
diff --git a/.github/workflows/release.lock.yml b/.github/workflows/release.lock.yml
index e203e68d272..0737a63983f 100644
--- a/.github/workflows/release.lock.yml
+++ b/.github/workflows/release.lock.yml
@@ -6489,28 +6489,28 @@ jobs:
steps:
- name: Checkout repository
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
+ uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
with:
cache: false
go-version-file: go.mod
- name: Download Go modules
run: go mod download
- name: Generate SBOM (SPDX format)
- uses: anchore/sbom-action@43a17d6e7add2b5535efe4dcae9952337c479a93 # v0.20.10
+ uses: anchore/sbom-action@43a17d6e7add2b5535efe4dcae9952337c479a93 # v0.20.11
with:
artifact-name: sbom.spdx.json
format: spdx-json
output-file: sbom.spdx.json
- name: Generate SBOM (CycloneDX format)
- uses: anchore/sbom-action@43a17d6e7add2b5535efe4dcae9952337c479a93 # v0.20.10
+ uses: anchore/sbom-action@43a17d6e7add2b5535efe4dcae9952337c479a93 # v0.20.11
with:
artifact-name: sbom.cdx.json
format: cyclonedx-json
output-file: sbom.cdx.json
- name: Upload SBOM artifacts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
name: sbom-artifacts
path: |
@@ -6689,12 +6689,12 @@ jobs:
release_tag: ${{ steps.get_release.outputs.release_tag }}
steps:
- name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
with:
fetch-depth: 0
persist-credentials: false
- name: Release with gh-extension-precompile
- uses: cli/gh-extension-precompile@9e2237c30f869ad3bcaed6a4be2cd43564dd421b # v2
+ uses: cli/gh-extension-precompile@9e2237c30f869ad3bcaed6a4be2cd43564dd421b # v2.1.0
with:
build_script_override: scripts/build-release.sh
go_version_file: go.mod
diff --git a/.github/workflows/safe-output-health.lock.yml b/.github/workflows/safe-output-health.lock.yml
index d294d1f9053..b7cc22f6fe6 100644
--- a/.github/workflows/safe-output-health.lock.yml
+++ b/.github/workflows/safe-output-health.lock.yml
@@ -167,7 +167,7 @@ jobs:
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
+ uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
with:
cache: true
go-version-file: go.mod
diff --git a/.github/workflows/slide-deck-maintainer.lock.yml b/.github/workflows/slide-deck-maintainer.lock.yml
index 36a4eb67f44..0d5ef45097b 100644
--- a/.github/workflows/slide-deck-maintainer.lock.yml
+++ b/.github/workflows/slide-deck-maintainer.lock.yml
@@ -167,7 +167,7 @@ jobs:
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
- name: Set up Node.js
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
+ uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
cache: npm
cache-dependency-path: docs/package-lock.json
diff --git a/.github/workflows/smoke-copilot-playwright.lock.yml b/.github/workflows/smoke-copilot-playwright.lock.yml
index eec59eb6873..fb2889dbc16 100644
--- a/.github/workflows/smoke-copilot-playwright.lock.yml
+++ b/.github/workflows/smoke-copilot-playwright.lock.yml
@@ -7624,7 +7624,7 @@ jobs:
run: "echo \"📋 Collecting Playwright MCP logs...\"\n\n# Create logs directory\nmkdir -p /tmp/gh-aw/playwright-debug-logs\n\n# Copy any playwright logs from the MCP logs directory\nif [ -d \"/tmp/gh-aw/mcp-logs/playwright\" ]; then\n echo \"Found Playwright MCP logs directory\"\n cp -r /tmp/gh-aw/mcp-logs/playwright/* /tmp/gh-aw/playwright-debug-logs/ 2>/dev/null || true\n ls -la /tmp/gh-aw/playwright-debug-logs/\nelse\n echo \"No Playwright MCP logs directory found at /tmp/gh-aw/mcp-logs/playwright\"\nfi\n\n# List all trace files if any\necho \"Looking for trace files...\"\nfind /tmp -name \"*.zip\" -o -name \"trace*\" 2>/dev/null | head -20 || true\n\n# Show docker container logs if any containers are still running\necho \"Checking for running Docker containers...\"\ndocker ps -a --format \"table {{.Names}}\\t{{.Status}}\\t{{.Image}}\" 2>/dev/null || true\n"
- if: always()
name: Upload Playwright Debug Logs
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: ignore
name: playwright-debug-logs-${{ github.run_id }}
diff --git a/.github/workflows/smoke-detector.lock.yml b/.github/workflows/smoke-detector.lock.yml
index 4cfccf299c4..82f4620e81d 100644
--- a/.github/workflows/smoke-detector.lock.yml
+++ b/.github/workflows/smoke-detector.lock.yml
@@ -595,7 +595,7 @@ jobs:
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
+ uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
with:
cache: true
go-version-file: go.mod
diff --git a/.github/workflows/static-analysis-report.lock.yml b/.github/workflows/static-analysis-report.lock.yml
index 4ece510b61b..e69cb06ecc1 100644
--- a/.github/workflows/static-analysis-report.lock.yml
+++ b/.github/workflows/static-analysis-report.lock.yml
@@ -166,7 +166,7 @@ jobs:
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
+ uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
with:
cache: true
go-version-file: go.mod
diff --git a/.github/workflows/super-linter.lock.yml b/.github/workflows/super-linter.lock.yml
index 8e569416580..fa2948866cc 100644
--- a/.github/workflows/super-linter.lock.yml
+++ b/.github/workflows/super-linter.lock.yml
@@ -166,7 +166,7 @@ jobs:
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
- name: Download super-linter log
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
with:
name: super-linter-log
path: /tmp/gh-aw/
@@ -7536,13 +7536,13 @@ jobs:
steps:
- name: Checkout Code
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
with:
fetch-depth: 0
persist-credentials: false
- name: Super-linter
id: super-linter
- uses: super-linter/super-linter@47984f49b4e87383eed97890fe2dca6063bbd9c3 # v8.2.1
+ uses: super-linter/super-linter@47984f49b4e87383eed97890fe2dca6063bbd9c3 # v8.3.1
env:
CREATE_LOG_FILE: "true"
DEFAULT_BRANCH: main
@@ -7564,7 +7564,7 @@ jobs:
fi
- name: Upload super-linter log
if: always()
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
name: super-linter-log
path: super-linter.log
diff --git a/.github/workflows/tidy.lock.yml b/.github/workflows/tidy.lock.yml
index 09887cdcb0b..849c95c24e4 100644
--- a/.github/workflows/tidy.lock.yml
+++ b/.github/workflows/tidy.lock.yml
@@ -577,13 +577,13 @@ jobs:
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
- name: Set up Node.js
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
+ uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
cache: npm
cache-dependency-path: pkg/workflow/js/package-lock.json
node-version: "24"
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
+ uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
with:
cache: true
go-version-file: go.mod
diff --git a/pkg/cli/fix_codemods.go b/pkg/cli/fix_codemods.go
index eb334adc17b..afb744e141b 100644
--- a/pkg/cli/fix_codemods.go
+++ b/pkg/cli/fix_codemods.go
@@ -45,6 +45,7 @@ func GetAllCodemods() []Codemod {
getNetworkFirewallCodemod(),
getCommandToSlashCommandCodemod(),
getSafeInputsModeCodemod(),
+ getUploadAssetsCodemod(),
}
}
@@ -488,3 +489,101 @@ func getSafeInputsModeCodemod() Codemod {
},
}
}
+
+// getUploadAssetsCodemod creates a codemod for migrating upload-assets to upload-asset (plural to singular)
+func getUploadAssetsCodemod() Codemod {
+ return Codemod{
+ ID: "upload-assets-to-upload-asset-migration",
+ Name: "Migrate upload-assets to upload-asset",
+ Description: "Replaces deprecated 'safe-outputs.upload-assets' field with 'safe-outputs.upload-asset' (plural to singular)",
+ IntroducedIn: "0.3.0",
+ Apply: func(content string, frontmatter map[string]any) (string, bool, error) {
+ // Check if safe-outputs.upload-assets exists
+ safeOutputsValue, hasSafeOutputs := frontmatter["safe-outputs"]
+ if !hasSafeOutputs {
+ return content, false, nil
+ }
+
+ safeOutputsMap, ok := safeOutputsValue.(map[string]any)
+ if !ok {
+ return content, false, nil
+ }
+
+ // Check if upload-assets field exists in safe-outputs (plural is deprecated)
+ _, hasUploadAssets := safeOutputsMap["upload-assets"]
+ if !hasUploadAssets {
+ return content, false, nil
+ }
+
+ // Parse frontmatter to get raw lines
+ result, err := parser.ExtractFrontmatterFromContent(content)
+ if err != nil {
+ return content, false, fmt.Errorf("failed to parse frontmatter: %w", err)
+ }
+
+ // Find and replace upload-assets with upload-asset within the safe-outputs block
+ var modified bool
+ var inSafeOutputsBlock bool
+ var safeOutputsIndent string
+
+ frontmatterLines := make([]string, len(result.FrontmatterLines))
+
+ for i, line := range result.FrontmatterLines {
+ trimmedLine := strings.TrimSpace(line)
+
+ // Track if we're in the safe-outputs block
+ if strings.HasPrefix(trimmedLine, "safe-outputs:") {
+ inSafeOutputsBlock = true
+ safeOutputsIndent = line[:len(line)-len(strings.TrimLeft(line, " \t"))]
+ frontmatterLines[i] = line
+ continue
+ }
+
+ // Check if we've left the safe-outputs block (new top-level key with same or less indentation)
+ if inSafeOutputsBlock && len(trimmedLine) > 0 && !strings.HasPrefix(trimmedLine, "#") {
+ currentIndent := line[:len(line)-len(strings.TrimLeft(line, " \t"))]
+ if len(currentIndent) <= len(safeOutputsIndent) && strings.Contains(line, ":") {
+ inSafeOutputsBlock = false
+ }
+ }
+
+ // Replace upload-assets with upload-asset if in safe-outputs block
+ if inSafeOutputsBlock && strings.HasPrefix(trimmedLine, "upload-assets:") {
+ // Preserve indentation
+ leadingSpace := line[:len(line)-len(strings.TrimLeft(line, " \t"))]
+
+ // Extract the value and any trailing comment
+ parts := strings.SplitN(line, ":", 2)
+ if len(parts) >= 2 {
+ valueAndComment := parts[1]
+ frontmatterLines[i] = fmt.Sprintf("%supload-asset:%s", leadingSpace, valueAndComment)
+ modified = true
+ codemodsLog.Printf("Replaced safe-outputs.upload-assets with safe-outputs.upload-asset on line %d", i+1)
+ } else {
+ frontmatterLines[i] = line
+ }
+ } else {
+ frontmatterLines[i] = line
+ }
+ }
+
+ if !modified {
+ return content, false, nil
+ }
+
+ // Reconstruct the content
+ var lines []string
+ lines = append(lines, "---")
+ lines = append(lines, frontmatterLines...)
+ lines = append(lines, "---")
+ if result.Markdown != "" {
+ lines = append(lines, "")
+ lines = append(lines, result.Markdown)
+ }
+
+ newContent := strings.Join(lines, "\n")
+ codemodsLog.Print("Applied upload-assets to upload-asset migration")
+ return newContent, true, nil
+ },
+ }
+}
diff --git a/pkg/parser/schemas/main_workflow_schema.json b/pkg/parser/schemas/main_workflow_schema.json
index 6957252fb12..67f5ccbc336 100644
--- a/pkg/parser/schemas/main_workflow_schema.json
+++ b/pkg/parser/schemas/main_workflow_schema.json
@@ -4196,7 +4196,7 @@
}
]
},
- "upload-assets": {
+ "upload-asset": {
"oneOf": [
{
"type": "object",
diff --git a/pkg/workflow/checkout_persist_credentials_test.go b/pkg/workflow/checkout_persist_credentials_test.go
index 2151f776fd5..89af95635a2 100644
--- a/pkg/workflow/checkout_persist_credentials_test.go
+++ b/pkg/workflow/checkout_persist_credentials_test.go
@@ -99,7 +99,7 @@ permissions:
issues: read
pull-requests: read
safe-outputs:
- upload-assets:
+ upload-asset:
engine: claude
strict: false
---`,
diff --git a/pkg/workflow/compiler_types.go b/pkg/workflow/compiler_types.go
index aa50fde12ba..c61327277a4 100644
--- a/pkg/workflow/compiler_types.go
+++ b/pkg/workflow/compiler_types.go
@@ -295,7 +295,7 @@ type SafeOutputsConfig struct {
UpdateIssues *UpdateIssuesConfig `yaml:"update-issues,omitempty"`
UpdatePullRequests *UpdatePullRequestsConfig `yaml:"update-pull-request,omitempty"` // Update GitHub pull request title/body
PushToPullRequestBranch *PushToPullRequestBranchConfig `yaml:"push-to-pull-request-branch,omitempty"`
- UploadAssets *UploadAssetsConfig `yaml:"upload-assets,omitempty"`
+ UploadAssets *UploadAssetsConfig `yaml:"upload-asset,omitempty"`
UpdateRelease *UpdateReleaseConfig `yaml:"update-release,omitempty"` // Update GitHub release descriptions
CreateAgentTasks *CreateAgentTaskConfig `yaml:"create-agent-task,omitempty"` // Create GitHub Copilot agent tasks
UpdateProjects *UpdateProjectConfig `yaml:"update-project,omitempty"` // Smart project board management (create/add/update)
diff --git a/pkg/workflow/imports.go b/pkg/workflow/imports.go
index a872b8383ab..531bb0a1ca5 100644
--- a/pkg/workflow/imports.go
+++ b/pkg/workflow/imports.go
@@ -456,7 +456,7 @@ func hasSafeOutputType(config *SafeOutputsConfig, key string) bool {
return config.UpdatePullRequests != nil
case "push-to-pull-request-branch":
return config.PushToPullRequestBranch != nil
- case "upload-assets":
+ case "upload-asset":
return config.UploadAssets != nil
case "update-release":
return config.UpdateRelease != nil
diff --git a/pkg/workflow/js/upload_assets.cjs b/pkg/workflow/js/upload_assets.cjs
index ee71a03ac38..7c37a420582 100644
--- a/pkg/workflow/js/upload_assets.cjs
+++ b/pkg/workflow/js/upload_assets.cjs
@@ -76,13 +76,16 @@ async function main() {
return;
}
- // Find all upload-assets items
- const uploadItems = result.items.filter(/** @param {any} item */ item => item.type === "upload_assets");
+ // Find all upload-asset items (singular is the standard)
+ const uploadItems = result.items.filter(/** @param {any} item */ item => item.type === "upload_asset");
- // Also check for legacy upload-asset items
- const uploadAssetItems = result.items.filter(/** @param {any} item */ item => item.type === "upload_asset");
+ // Handle legacy upload-assets (plural, hyphenated) with warning
+ const legacyUploadAssetsItems = result.items.filter(/** @param {any} item */ item => item.type === "upload-assets");
+ if (legacyUploadAssetsItems.length > 0) {
+ core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
+ }
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
+ const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
if (allUploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
diff --git a/pkg/workflow/js/upload_assets.test.cjs b/pkg/workflow/js/upload_assets.test.cjs
index e87087c5357..c6210d5294a 100644
--- a/pkg/workflow/js/upload_assets.test.cjs
+++ b/pkg/workflow/js/upload_assets.test.cjs
@@ -62,6 +62,25 @@ const mockCore = { debug: vi.fn(), info: vi.fn(), notice: vi.fn(), warning: vi.f
(expect(branchNameCall).toBeDefined(), expect(branchNameCall[1]).toBe("assets/my-branch"));
});
}),
+ describe("legacy upload_assets type handling", () => {
+ it("should warn about legacy upload_assets type (plural underscored)", async () => {
+ (process.env.GH_AW_ASSETS_BRANCH = "assets/test-workflow"), (process.env.GH_AW_SAFE_OUTPUTS_STAGED = "false");
+ const assetDir = "/tmp/gh-aw/safeoutputs/assets";
+ fs.existsSync(assetDir) || fs.mkdirSync(assetDir, { recursive: !0 });
+ const assetPath = path.join(assetDir, "test.png");
+ fs.writeFileSync(assetPath, "fake png data");
+ const crypto = require("crypto"),
+ fileContent = fs.readFileSync(assetPath),
+ agentOutput = {
+ items: [{ type: "upload_assets", fileName: "test.png", sha: crypto.createHash("sha256").update(fileContent).digest("hex"), size: fileContent.length, targetFileName: "test.png", url: "https://example.com/test.png" }],
+ };
+ setAgentOutput(agentOutput);
+ (mockExec.exec.mockImplementation(async () => 0), await executeScript());
+ const warningCalls = mockCore.warning.mock.calls.filter(call => call[0].includes("legacy type"));
+ (expect(warningCalls.length).toBeGreaterThan(0), expect(warningCalls[0][0]).toContain("upload_assets"), expect(warningCalls[0][0]).toContain("deprecated"));
+ fs.existsSync(assetPath) && fs.unlinkSync(assetPath);
+ });
+ }),
describe("branch prefix validation", () => {
(it("should allow creating orphaned branch with 'assets/' prefix when branch doesn't exist", async () => {
(fs.existsSync("test.png") && fs.unlinkSync("test.png"), (process.env.GH_AW_ASSETS_BRANCH = "assets/test-workflow"), (process.env.GH_AW_SAFE_OUTPUTS_STAGED = "false"));
diff --git a/pkg/workflow/publish_assets.go b/pkg/workflow/publish_assets.go
index 51866335a1c..e3fd2364de0 100644
--- a/pkg/workflow/publish_assets.go
+++ b/pkg/workflow/publish_assets.go
@@ -19,8 +19,8 @@ type UploadAssetsConfig struct {
// parseUploadAssetConfig handles upload-asset configuration
func (c *Compiler) parseUploadAssetConfig(outputMap map[string]any) *UploadAssetsConfig {
- if configData, exists := outputMap["upload-assets"]; exists {
- publishAssetsLog.Print("Parsing upload-assets configuration")
+ if configData, exists := outputMap["upload-asset"]; exists {
+ publishAssetsLog.Print("Parsing upload-asset configuration")
config := &UploadAssetsConfig{
BranchName: "assets/${{ github.workflow }}", // Default branch name
MaxSizeKB: 10240, // Default 10MB
@@ -64,10 +64,10 @@ func (c *Compiler) parseUploadAssetConfig(outputMap map[string]any) *UploadAsset
// Parse common base fields with default max of 0 (no limit)
c.parseBaseSafeOutputConfig(configMap, &config.BaseSafeOutputConfig, 0)
- publishAssetsLog.Printf("Parsed upload-assets config: branch=%s, max_size_kb=%d, allowed_exts=%d", config.BranchName, config.MaxSizeKB, len(config.AllowedExts))
+ publishAssetsLog.Printf("Parsed upload-asset config: branch=%s, max_size_kb=%d, allowed_exts=%d", config.BranchName, config.MaxSizeKB, len(config.AllowedExts))
} else if configData == nil {
// Handle null case: create config with defaults
- publishAssetsLog.Print("Using default upload-assets configuration")
+ publishAssetsLog.Print("Using default upload-asset configuration")
return config
}
diff --git a/pkg/workflow/publish_assets_test.go b/pkg/workflow/publish_assets_test.go
index 880cb76430c..c9f28b0f6f5 100644
--- a/pkg/workflow/publish_assets_test.go
+++ b/pkg/workflow/publish_assets_test.go
@@ -16,7 +16,7 @@ func TestParseUploadAssetConfig(t *testing.T) {
{
name: "upload-asset config with custom values",
input: map[string]any{
- "upload-assets": map[string]any{
+ "upload-asset": map[string]any{
"branch": "my-assets/${{ github.event.repository.name }}",
"max-size": 5120,
"allowed-exts": []any{".jpg", ".png", ".txt"},
@@ -33,7 +33,7 @@ func TestParseUploadAssetConfig(t *testing.T) {
{
name: "upload-asset config with max",
input: map[string]any{
- "upload-assets": map[string]any{
+ "upload-asset": map[string]any{
"max": 5,
},
},
diff --git a/pkg/workflow/safe_outputs_config.go b/pkg/workflow/safe_outputs_config.go
index 3e05640523d..65561602280 100644
--- a/pkg/workflow/safe_outputs_config.go
+++ b/pkg/workflow/safe_outputs_config.go
@@ -130,7 +130,7 @@ func GetEnabledSafeOutputToolNames(safeOutputs *SafeOutputsConfig) []string {
tools = append(tools, "push_to_pull_request_branch")
}
if safeOutputs.UploadAssets != nil {
- tools = append(tools, "upload_assets")
+ tools = append(tools, "upload_asset")
}
if safeOutputs.UpdateRelease != nil {
tools = append(tools, "update_release")
diff --git a/pkg/workflow/upload_assets_config_test.go b/pkg/workflow/upload_assets_config_test.go
index 19190b02de6..ca4bd007c58 100644
--- a/pkg/workflow/upload_assets_config_test.go
+++ b/pkg/workflow/upload_assets_config_test.go
@@ -9,7 +9,7 @@ func TestUploadAssetsConfigDefaults(t *testing.T) {
// Test default configuration
outputMap := map[string]any{
- "upload-assets": nil,
+ "upload-asset": nil,
}
config := compiler.parseUploadAssetConfig(outputMap)
@@ -40,7 +40,7 @@ func TestUploadAssetsConfigCustomExtensions(t *testing.T) {
// Test custom configuration like dev.md
outputMap := map[string]any{
- "upload-assets": map[string]any{
+ "upload-asset": map[string]any{
"allowed-exts": []any{".txt"},
"max-size": 1024,
},
From f16295eec93b1f3d0272ac5b2c329340213d0b06 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Mon, 22 Dec 2025 17:13:44 +0000
Subject: [PATCH 04/11] Apply codemod and update all references to upload-asset
Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com>
---
.github/aw/schemas/agentic-workflow.json | 2 +-
.github/workflows/ai-moderator.lock.yml | 2 +-
.github/workflows/audit-workflows.lock.yml | 11 +-
.github/workflows/audit-workflows.md | 4 +-
.../copilot-pr-nlp-analysis.lock.yml | 194 ++++++++++++++++-
.../copilot-session-insights.lock.yml | 11 +-
.github/workflows/copilot-session-insights.md | 5 +-
.github/workflows/daily-code-metrics.lock.yml | 4 +-
.../daily-copilot-token-report.lock.yml | 11 +-
.../workflows/daily-copilot-token-report.md | 4 +-
.github/workflows/daily-file-diet.lock.yml | 196 +++++++++++++++++-
.../workflows/daily-firewall-report.lock.yml | 11 +-
.github/workflows/daily-firewall-report.md | 4 +-
.../workflows/daily-issues-report.lock.yml | 11 +-
.github/workflows/daily-issues-report.md | 4 +-
.../daily-multi-device-docs-tester.lock.yml | 11 +-
.../daily-multi-device-docs-tester.md | 4 +-
.github/workflows/daily-news.lock.yml | 11 +-
.github/workflows/daily-news.md | 2 +-
.../daily-performance-summary.lock.yml | 11 +-
.../workflows/daily-performance-summary.md | 4 +-
.../workflows/daily-repo-chronicle.lock.yml | 11 +-
.github/workflows/daily-repo-chronicle.md | 2 +-
.github/workflows/deep-report.lock.yml | 11 +-
.github/workflows/deep-report.md | 4 +-
.github/workflows/dev-hawk.lock.yml | 2 +-
.github/workflows/docs-noob-tester.lock.yml | 11 +-
.github/workflows/docs-noob-tester.md | 4 +-
.github/workflows/firewall-escape.lock.yml | 2 +-
.../github-mcp-structural-analysis.lock.yml | 194 ++++++++++++++++-
.../workflows/go-pattern-detector.lock.yml | 2 +-
.github/workflows/intelligence.lock.yml | 194 ++++++++++++++++-
.github/workflows/issue-classifier.lock.yml | 2 +-
.github/workflows/issue-monster.lock.yml | 2 +-
.github/workflows/mcp-inspector.lock.yml | 4 +-
.../workflows/notion-issue-summary.lock.yml | 2 +-
.github/workflows/org-health-report.lock.yml | 11 +-
.github/workflows/org-health-report.md | 4 +-
.github/workflows/poem-bot.lock.yml | 11 +-
.github/workflows/poem-bot.md | 4 +-
.github/workflows/portfolio-analyst.lock.yml | 11 +-
.github/workflows/portfolio-analyst.md | 4 +-
.../prompt-clustering-analysis.lock.yml | 6 +-
.github/workflows/python-data-charts.lock.yml | 11 +-
.github/workflows/python-data-charts.md | 5 +-
.github/workflows/release.lock.yml | 14 +-
.github/workflows/safe-output-health.lock.yml | 2 +-
.github/workflows/shared/python-dataviz.md | 2 +-
.../workflows/slide-deck-maintainer.lock.yml | 2 +-
.../smoke-copilot-playwright.lock.yml | 2 +-
.github/workflows/smoke-detector.lock.yml | 2 +-
.../workflows/stale-repo-identifier.lock.yml | 11 +-
.github/workflows/stale-repo-identifier.md | 4 +-
.../workflows/static-analysis-report.lock.yml | 2 +-
.github/workflows/super-linter.lock.yml | 8 +-
.../workflows/technical-doc-writer.lock.yml | 11 +-
.github/workflows/technical-doc-writer.md | 4 +-
.github/workflows/tidy.lock.yml | 4 +-
.github/workflows/unbloat-docs.lock.yml | 11 +-
.github/workflows/unbloat-docs.md | 4 +-
.../workflows/weekly-issue-summary.lock.yml | 11 +-
.github/workflows/weekly-issue-summary.md | 2 +-
docs/slides/index.md | 2 +-
.../docs/reference/frontmatter-full.md | 4 +-
docs/src/content/docs/reference/glossary.md | 2 +-
.../content/docs/reference/safe-outputs.md | 10 +-
.../test-claude-playwright-screenshots.md | 2 +-
.../test-copilot-playwright-screenshots.md | 2 +-
pkg/cli/workflows/test-playwright-args.md | 2 +-
pkg/workflow/js/upload_assets.test.cjs | 2 +-
specs/safe-output-environment-variables.md | 2 +-
71 files changed, 973 insertions(+), 182 deletions(-)
diff --git a/.github/aw/schemas/agentic-workflow.json b/.github/aw/schemas/agentic-workflow.json
index 6957252fb12..67f5ccbc336 100644
--- a/.github/aw/schemas/agentic-workflow.json
+++ b/.github/aw/schemas/agentic-workflow.json
@@ -4196,7 +4196,7 @@
}
]
},
- "upload-assets": {
+ "upload-asset": {
"oneOf": [
{
"type": "object",
diff --git a/.github/workflows/ai-moderator.lock.yml b/.github/workflows/ai-moderator.lock.yml
index cc1e3a190ba..c46dc6c72a2 100644
--- a/.github/workflows/ai-moderator.lock.yml
+++ b/.github/workflows/ai-moderator.lock.yml
@@ -5800,7 +5800,7 @@ jobs:
- name: Check if actor is external user or GitHub Action bot
id: check_actor
if: ${{ github.event_name != 'workflow_dispatch' }}
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |-
const actor = context.actor;
diff --git a/.github/workflows/audit-workflows.lock.yml b/.github/workflows/audit-workflows.lock.yml
index f0ea67fbacf..2abf5ef322b 100644
--- a/.github/workflows/audit-workflows.lock.yml
+++ b/.github/workflows/audit-workflows.lock.yml
@@ -2380,7 +2380,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_discussion, missing_tool, noop, upload_assets
+ **Available tools**: create_discussion, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -7631,9 +7631,12 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload_assets");
+ if (legacyUploadAssetsItems.length > 0) {
+ core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload_assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
+ }
+ const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
if (allUploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
diff --git a/.github/workflows/audit-workflows.md b/.github/workflows/audit-workflows.md
index 208d0349fe2..15149cd0629 100644
--- a/.github/workflows/audit-workflows.md
+++ b/.github/workflows/audit-workflows.md
@@ -19,7 +19,7 @@ steps:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: ./gh-aw logs --start-date -1d -o /tmp/gh-aw/aw-mcp/logs
safe-outputs:
- upload-assets:
+ upload-asset:
create-discussion:
category: "audits"
max: 1
@@ -160,4 +160,4 @@ Use gh-aw MCP server (not CLI directly). Run `status` tool to verify.
Cache structure: `/tmp/gh-aw/cache-memory/{audits,patterns,metrics}/*.json`
-Always create discussion with findings and update cache memory.
+Always create discussion with findings and update cache memory.
\ No newline at end of file
diff --git a/.github/workflows/copilot-pr-nlp-analysis.lock.yml b/.github/workflows/copilot-pr-nlp-analysis.lock.yml
index fd6766dea38..1ed8416218f 100644
--- a/.github/workflows/copilot-pr-nlp-analysis.lock.yml
+++ b/.github/workflows/copilot-pr-nlp-analysis.lock.yml
@@ -147,6 +147,9 @@ jobs:
concurrency:
group: "gh-aw-copilot-${{ github.workflow }}"
env:
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs
GH_AW_SAFE_OUTPUTS: /tmp/gh-aw/safeoutputs/outputs.jsonl
GH_AW_SAFE_OUTPUTS_CONFIG_PATH: /tmp/gh-aw/safeoutputs/config.json
@@ -174,7 +177,7 @@ jobs:
run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n"
- if: always()
name: Upload generated charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
with:
if-no-files-found: warn
name: data-charts
@@ -182,7 +185,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source files and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
with:
if-no-files-found: warn
name: python-source-and-data
@@ -348,7 +351,7 @@ jobs:
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
cat > /tmp/gh-aw/safeoutputs/config.json << 'EOF'
- {"create_discussion":{"max":1},"missing_tool":{"max":0},"noop":{"max":1}}
+ {"create_discussion":{"max":1},"missing_tool":{"max":0},"noop":{"max":1},"upload_asset":{"max":0}}
EOF
cat > /tmp/gh-aw/safeoutputs/tools.json << 'EOF'
[
@@ -378,6 +381,23 @@ jobs:
},
"name": "create_discussion"
},
+ {
+ "description": "Upload a file as a URL-addressable asset that can be referenced in issues, PRs, or comments. The file is stored on an orphaned git branch and returns a permanent URL. Use this for images, diagrams, or other files that need to be embedded in GitHub content. CONSTRAINTS: Maximum file size: 10240KB. Allowed file extensions: [.png .jpg .jpeg].",
+ "inputSchema": {
+ "additionalProperties": false,
+ "properties": {
+ "path": {
+ "description": "Absolute file path to upload (e.g., '/tmp/chart.png'). Must be under the workspace or /tmp directory. By default, only image files (.png, .jpg, .jpeg) are allowed; other file types require workflow configuration.",
+ "type": "string"
+ }
+ },
+ "required": [
+ "path"
+ ],
+ "type": "object"
+ },
+ "name": "upload_asset"
+ },
{
"description": "Report that a tool or capability needed to complete the task is not available. Use this when you cannot accomplish what was requested because the required functionality is missing or access is restricted.",
"inputSchema": {
@@ -483,6 +503,15 @@ jobs:
"maxLength": 65000
}
}
+ },
+ "upload_asset": {
+ "defaultMax": 10,
+ "fields": {
+ "path": {
+ "required": true,
+ "type": "string"
+ }
+ }
}
}
EOF
@@ -1823,6 +1852,9 @@ jobs:
env:
GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }}
+ GH_AW_ASSETS_BRANCH: ${{ env.GH_AW_ASSETS_BRANCH }}
+ GH_AW_ASSETS_MAX_SIZE_KB: ${{ env.GH_AW_ASSETS_MAX_SIZE_KB }}
+ GH_AW_ASSETS_ALLOWED_EXTS: ${{ env.GH_AW_ASSETS_ALLOWED_EXTS }}
run: |
mkdir -p /tmp/gh-aw/mcp-config
mkdir -p /home/runner/.copilot
@@ -2902,7 +2934,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_discussion, missing_tool, noop
+ **Available tools**: create_discussion, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -3199,6 +3231,9 @@ jobs:
env:
COPILOT_AGENT_RUNNER_TYPE: STANDALONE
COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }}
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_MCP_CONFIG: /home/runner/.copilot/mcp-config.json
GH_AW_MODEL_AGENT_COPILOT: ${{ vars.GH_AW_MODEL_AGENT_COPILOT || '' }}
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
@@ -6236,6 +6271,13 @@ jobs:
with:
name: cache-memory
path: /tmp/gh-aw/cache-memory
+ - name: Upload safe outputs assets
+ if: always()
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ if-no-files-found: ignore
- name: Validate agent logs for errors
if: always()
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
@@ -7245,10 +7287,13 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: read
+ contents: write
discussions: write
timeout-minutes: 15
env:
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_WORKFLOW_ID: "copilot-pr-nlp-analysis"
GH_AW_WORKFLOW_NAME: "Copilot PR Conversation NLP Analysis"
@@ -8432,6 +8477,145 @@ jobs:
core.info(`Successfully created ${createdDiscussions.length} discussion(s)`);
}
(async () => { await main(); })();
+ - name: Upload Assets
+ id: upload_assets
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ env:
+ GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ with:
+ github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
+ script: |
+ globalThis.github = github;
+ globalThis.context = context;
+ globalThis.core = core;
+ globalThis.exec = exec;
+ globalThis.io = io;
+ const fs = require("fs");
+ const path = require("path");
+ const crypto = require("crypto");
+ const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ function normalizeBranchName(branchName) {
+ if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
+ return branchName;
+ }
+ let normalized = branchName.replace(/[^a-zA-Z0-9\-_/.]+/g, "-");
+ normalized = normalized.replace(/-+/g, "-");
+ normalized = normalized.replace(/^-+|-+$/g, "");
+ if (normalized.length > 128) {
+ normalized = normalized.substring(0, 128);
+ }
+ normalized = normalized.replace(/-+$/, "");
+ normalized = normalized.toLowerCase();
+ return normalized;
+ }
+ async function main() {
+ const isStaged = process.env.GH_AW_SAFE_OUTPUTS_STAGED === "true";
+ const branchName = process.env.GH_AW_ASSETS_BRANCH;
+ if (!branchName || typeof branchName !== "string") {
+ core.setFailed("GH_AW_ASSETS_BRANCH environment variable is required but not set");
+ return;
+ }
+ const normalizedBranchName = normalizeBranchName(branchName);
+ core.info(`Using assets branch: ${normalizedBranchName}`);
+ const result = loadAgentOutput();
+ if (!result.success) {
+ core.setOutput("upload_count", "0");
+ core.setOutput("branch_name", normalizedBranchName);
+ return;
+ }
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload_assets");
+ if (legacyUploadAssetsItems.length > 0) {
+ core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload_assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
+ }
+ const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
+ if (allUploadItems.length === 0) {
+ core.info("No upload-asset items found in agent output");
+ core.setOutput("upload_count", "0");
+ core.setOutput("branch_name", normalizedBranchName);
+ return;
+ }
+ core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ let uploadCount = 0;
+ let hasChanges = false;
+ try {
+ try {
+ await exec.exec(`git rev-parse --verify origin/${normalizedBranchName}`);
+ await exec.exec(`git checkout -B ${normalizedBranchName} origin/${normalizedBranchName}`);
+ core.info(`Checked out existing branch from origin: ${normalizedBranchName}`);
+ } catch (originError) {
+ if (!normalizedBranchName.startsWith("assets/")) {
+ core.setFailed(
+ `Branch '${normalizedBranchName}' does not start with the required 'assets/' prefix. ` +
+ `Orphaned branches can only be automatically created under the 'assets/' prefix. ` +
+ `Please create the branch manually first, or use a branch name starting with 'assets/'.`
+ );
+ return;
+ }
+ core.info(`Creating new orphaned branch: ${normalizedBranchName}`);
+ await exec.exec(`git checkout --orphan ${normalizedBranchName}`);
+ await exec.exec(`git rm -rf .`);
+ await exec.exec(`git clean -fdx`);
+ }
+ for (const asset of allUploadItems) {
+ try {
+ const { fileName, sha, size, targetFileName } = asset;
+ if (!fileName || !sha || !targetFileName) {
+ core.error(`Invalid asset entry missing required fields: ${JSON.stringify(asset)}`);
+ continue;
+ }
+ const assetSourcePath = path.join("/tmp/gh-aw/safeoutputs/assets", fileName);
+ if (!fs.existsSync(assetSourcePath)) {
+ core.warning(`Asset file not found: ${assetSourcePath}`);
+ continue;
+ }
+ const fileContent = fs.readFileSync(assetSourcePath);
+ const computedSha = crypto.createHash("sha256").update(fileContent).digest("hex");
+ if (computedSha !== sha) {
+ core.warning(`SHA mismatch for ${fileName}: expected ${sha}, got ${computedSha}`);
+ continue;
+ }
+ if (fs.existsSync(targetFileName)) {
+ core.info(`Asset ${targetFileName} already exists, skipping`);
+ continue;
+ }
+ fs.copyFileSync(assetSourcePath, targetFileName);
+ await exec.exec(`git add "${targetFileName}"`);
+ uploadCount++;
+ hasChanges = true;
+ core.info(`Added asset: ${targetFileName} (${size} bytes)`);
+ } catch (error) {
+ core.warning(`Failed to process asset ${asset.fileName}: ${error instanceof Error ? error.message : String(error)}`);
+ }
+ }
+ if (hasChanges) {
+ const commitMessage = `[skip-ci] Add ${uploadCount} asset(s)`;
+ await exec.exec(`git`, [`commit`, `-m`, commitMessage]);
+ if (isStaged) {
+ core.summary.addRaw("## Staged Asset Publication");
+ } else {
+ await exec.exec(`git push origin ${normalizedBranchName}`);
+ core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
+ core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
+ }
+ for (const asset of allUploadItems) {
+ if (asset.fileName && asset.sha && asset.size && asset.url) {
+ core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
+ }
+ }
+ core.summary.write();
+ } else {
+ core.info("No new assets to upload");
+ }
+ } catch (error) {
+ core.setFailed(`Failed to upload assets: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+ core.setOutput("upload_count", uploadCount.toString());
+ core.setOutput("branch_name", normalizedBranchName);
+ }
+ (async () => { await main(); })();
update_cache_memory:
needs:
diff --git a/.github/workflows/copilot-session-insights.lock.yml b/.github/workflows/copilot-session-insights.lock.yml
index 4ff3186d91d..bf89f941069 100644
--- a/.github/workflows/copilot-session-insights.lock.yml
+++ b/.github/workflows/copilot-session-insights.lock.yml
@@ -3368,7 +3368,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_discussion, missing_tool, noop, upload_assets
+ **Available tools**: create_discussion, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -8616,9 +8616,12 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload_assets");
+ if (legacyUploadAssetsItems.length > 0) {
+ core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload_assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
+ }
+ const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
if (allUploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
diff --git a/.github/workflows/copilot-session-insights.md b/.github/workflows/copilot-session-insights.md
index 098a9006679..3f719e923b4 100644
--- a/.github/workflows/copilot-session-insights.md
+++ b/.github/workflows/copilot-session-insights.md
@@ -23,7 +23,7 @@ network:
- python
safe-outputs:
- upload-assets:
+ upload-asset:
create-discussion:
title-prefix: "[copilot-session-insights] "
category: "audits"
@@ -743,5 +743,4 @@ A successful analysis includes:
---
-Begin your analysis by verifying the downloaded session data, loading historical context from cache memory, and proceeding through the analysis phases systematically.
-
+Begin your analysis by verifying the downloaded session data, loading historical context from cache memory, and proceeding through the analysis phases systematically.
\ No newline at end of file
diff --git a/.github/workflows/daily-code-metrics.lock.yml b/.github/workflows/daily-code-metrics.lock.yml
index 9d245b2b8d8..6a86afdc39a 100644
--- a/.github/workflows/daily-code-metrics.lock.yml
+++ b/.github/workflows/daily-code-metrics.lock.yml
@@ -170,7 +170,7 @@ jobs:
pip install --user --quiet numpy pandas matplotlib seaborn scipy
- if: always()
name: Upload charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
with:
if-no-files-found: warn
name: trending-charts
@@ -178,7 +178,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
with:
if-no-files-found: warn
name: trending-source-and-data
diff --git a/.github/workflows/daily-copilot-token-report.lock.yml b/.github/workflows/daily-copilot-token-report.lock.yml
index 47862800598..883ef912c39 100644
--- a/.github/workflows/daily-copilot-token-report.lock.yml
+++ b/.github/workflows/daily-copilot-token-report.lock.yml
@@ -3021,7 +3021,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_discussion, missing_tool, noop, upload_assets
+ **Available tools**: create_discussion, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -8614,9 +8614,12 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload_assets");
+ if (legacyUploadAssetsItems.length > 0) {
+ core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload_assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
+ }
+ const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
if (allUploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
diff --git a/.github/workflows/daily-copilot-token-report.md b/.github/workflows/daily-copilot-token-report.md
index de5059e85e8..ed05facd3bd 100644
--- a/.github/workflows/daily-copilot-token-report.md
+++ b/.github/workflows/daily-copilot-token-report.md
@@ -34,7 +34,7 @@ steps:
exit 1
fi
safe-outputs:
- upload-assets:
+ upload-asset:
create-discussion:
expires: 3d
category: "audits"
@@ -663,4 +663,4 @@ Your output MUST:
7. Store current day's metrics in cache memory for future trend tracking
8. Use the collapsible details format from the reporting.md import
-Begin your analysis now. The logs have been pre-downloaded to `/tmp/gh-aw/copilot-logs.json` - process the data systematically, generate insightful visualizations, and create a comprehensive report that helps optimize Copilot token consumption across all workflows.
+Begin your analysis now. The logs have been pre-downloaded to `/tmp/gh-aw/copilot-logs.json` - process the data systematically, generate insightful visualizations, and create a comprehensive report that helps optimize Copilot token consumption across all workflows.
\ No newline at end of file
diff --git a/.github/workflows/daily-file-diet.lock.yml b/.github/workflows/daily-file-diet.lock.yml
index e2579ee11c2..2fdc4fc5dd5 100644
--- a/.github/workflows/daily-file-diet.lock.yml
+++ b/.github/workflows/daily-file-diet.lock.yml
@@ -149,6 +149,9 @@ jobs:
concurrency:
group: "gh-aw-copilot-${{ github.workflow }}"
env:
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs
GH_AW_SAFE_OUTPUTS: /tmp/gh-aw/safeoutputs/outputs.jsonl
GH_AW_SAFE_OUTPUTS_CONFIG_PATH: /tmp/gh-aw/safeoutputs/config.json
@@ -186,7 +189,7 @@ jobs:
run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n"
- if: always()
name: Upload generated charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
with:
if-no-files-found: warn
name: data-charts
@@ -194,7 +197,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source files and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
with:
if-no-files-found: warn
name: python-source-and-data
@@ -376,7 +379,7 @@ jobs:
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
cat > /tmp/gh-aw/safeoutputs/config.json << 'EOF'
- {"create_issue":{"max":1},"missing_tool":{"max":0},"noop":{"max":1}}
+ {"create_issue":{"max":1},"missing_tool":{"max":0},"noop":{"max":1},"upload_asset":{"max":0}}
EOF
cat > /tmp/gh-aw/safeoutputs/tools.json << 'EOF'
[
@@ -420,6 +423,23 @@ jobs:
},
"name": "create_issue"
},
+ {
+ "description": "Upload a file as a URL-addressable asset that can be referenced in issues, PRs, or comments. The file is stored on an orphaned git branch and returns a permanent URL. Use this for images, diagrams, or other files that need to be embedded in GitHub content. CONSTRAINTS: Maximum file size: 10240KB. Allowed file extensions: [.png .jpg .jpeg].",
+ "inputSchema": {
+ "additionalProperties": false,
+ "properties": {
+ "path": {
+ "description": "Absolute file path to upload (e.g., '/tmp/chart.png'). Must be under the workspace or /tmp directory. By default, only image files (.png, .jpg, .jpeg) are allowed; other file types require workflow configuration.",
+ "type": "string"
+ }
+ },
+ "required": [
+ "path"
+ ],
+ "type": "object"
+ },
+ "name": "upload_asset"
+ },
{
"description": "Report that a tool or capability needed to complete the task is not available. Use this when you cannot accomplish what was requested because the required functionality is missing or access is restricted.",
"inputSchema": {
@@ -532,6 +552,15 @@ jobs:
"maxLength": 65000
}
}
+ },
+ "upload_asset": {
+ "defaultMax": 10,
+ "fields": {
+ "path": {
+ "required": true,
+ "type": "string"
+ }
+ }
}
}
EOF
@@ -1872,6 +1901,9 @@ jobs:
env:
GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }}
+ GH_AW_ASSETS_BRANCH: ${{ env.GH_AW_ASSETS_BRANCH }}
+ GH_AW_ASSETS_MAX_SIZE_KB: ${{ env.GH_AW_ASSETS_MAX_SIZE_KB }}
+ GH_AW_ASSETS_ALLOWED_EXTS: ${{ env.GH_AW_ASSETS_ALLOWED_EXTS }}
run: |
mkdir -p /tmp/gh-aw/mcp-config
mkdir -p /home/runner/.copilot
@@ -2939,7 +2971,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_issue, missing_tool, noop
+ **Available tools**: create_issue, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -3237,6 +3269,9 @@ jobs:
env:
COPILOT_AGENT_RUNNER_TYPE: STANDALONE
COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }}
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_MCP_CONFIG: /home/runner/.copilot/mcp-config.json
GH_AW_MODEL_AGENT_COPILOT: ${{ vars.GH_AW_MODEL_AGENT_COPILOT || '' }}
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
@@ -6283,6 +6318,13 @@ jobs:
with:
name: cache-memory
path: /tmp/gh-aw/cache-memory
+ - name: Upload safe outputs assets
+ if: always()
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ if-no-files-found: ignore
- name: Validate agent logs for errors
if: always()
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
@@ -7715,10 +7757,13 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: read
+ contents: write
issues: write
timeout-minutes: 15
env:
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_TRACKER_ID: "daily-file-diet"
GH_AW_WORKFLOW_ID: "daily-file-diet"
@@ -7748,7 +7793,7 @@ jobs:
owner: ${{ github.repository_owner }}
repositories: ${{ github.event.repository.name }}
github-api-url: ${{ github.api_url }}
- permission-contents: read
+ permission-contents: write
permission-issues: write
- name: Setup JavaScript files
id: setup_scripts
@@ -8688,6 +8733,145 @@ jobs:
(async () => {
await main();
})();
+ - name: Upload Assets
+ id: upload_assets
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ env:
+ GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ with:
+ github-token: ${{ steps.app-token.outputs.token }}
+ script: |
+ globalThis.github = github;
+ globalThis.context = context;
+ globalThis.core = core;
+ globalThis.exec = exec;
+ globalThis.io = io;
+ const fs = require("fs");
+ const path = require("path");
+ const crypto = require("crypto");
+ const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ function normalizeBranchName(branchName) {
+ if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
+ return branchName;
+ }
+ let normalized = branchName.replace(/[^a-zA-Z0-9\-_/.]+/g, "-");
+ normalized = normalized.replace(/-+/g, "-");
+ normalized = normalized.replace(/^-+|-+$/g, "");
+ if (normalized.length > 128) {
+ normalized = normalized.substring(0, 128);
+ }
+ normalized = normalized.replace(/-+$/, "");
+ normalized = normalized.toLowerCase();
+ return normalized;
+ }
+ async function main() {
+ const isStaged = process.env.GH_AW_SAFE_OUTPUTS_STAGED === "true";
+ const branchName = process.env.GH_AW_ASSETS_BRANCH;
+ if (!branchName || typeof branchName !== "string") {
+ core.setFailed("GH_AW_ASSETS_BRANCH environment variable is required but not set");
+ return;
+ }
+ const normalizedBranchName = normalizeBranchName(branchName);
+ core.info(`Using assets branch: ${normalizedBranchName}`);
+ const result = loadAgentOutput();
+ if (!result.success) {
+ core.setOutput("upload_count", "0");
+ core.setOutput("branch_name", normalizedBranchName);
+ return;
+ }
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
+ if (legacyUploadAssetsItems.length > 0) {
+ core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
+ }
+ const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
+ if (allUploadItems.length === 0) {
+ core.info("No upload-asset items found in agent output");
+ core.setOutput("upload_count", "0");
+ core.setOutput("branch_name", normalizedBranchName);
+ return;
+ }
+ core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ let uploadCount = 0;
+ let hasChanges = false;
+ try {
+ try {
+ await exec.exec(`git rev-parse --verify origin/${normalizedBranchName}`);
+ await exec.exec(`git checkout -B ${normalizedBranchName} origin/${normalizedBranchName}`);
+ core.info(`Checked out existing branch from origin: ${normalizedBranchName}`);
+ } catch (originError) {
+ if (!normalizedBranchName.startsWith("assets/")) {
+ core.setFailed(
+ `Branch '${normalizedBranchName}' does not start with the required 'assets/' prefix. ` +
+ `Orphaned branches can only be automatically created under the 'assets/' prefix. ` +
+ `Please create the branch manually first, or use a branch name starting with 'assets/'.`
+ );
+ return;
+ }
+ core.info(`Creating new orphaned branch: ${normalizedBranchName}`);
+ await exec.exec(`git checkout --orphan ${normalizedBranchName}`);
+ await exec.exec(`git rm -rf .`);
+ await exec.exec(`git clean -fdx`);
+ }
+ for (const asset of allUploadItems) {
+ try {
+ const { fileName, sha, size, targetFileName } = asset;
+ if (!fileName || !sha || !targetFileName) {
+ core.error(`Invalid asset entry missing required fields: ${JSON.stringify(asset)}`);
+ continue;
+ }
+ const assetSourcePath = path.join("/tmp/gh-aw/safeoutputs/assets", fileName);
+ if (!fs.existsSync(assetSourcePath)) {
+ core.warning(`Asset file not found: ${assetSourcePath}`);
+ continue;
+ }
+ const fileContent = fs.readFileSync(assetSourcePath);
+ const computedSha = crypto.createHash("sha256").update(fileContent).digest("hex");
+ if (computedSha !== sha) {
+ core.warning(`SHA mismatch for ${fileName}: expected ${sha}, got ${computedSha}`);
+ continue;
+ }
+ if (fs.existsSync(targetFileName)) {
+ core.info(`Asset ${targetFileName} already exists, skipping`);
+ continue;
+ }
+ fs.copyFileSync(assetSourcePath, targetFileName);
+ await exec.exec(`git add "${targetFileName}"`);
+ uploadCount++;
+ hasChanges = true;
+ core.info(`Added asset: ${targetFileName} (${size} bytes)`);
+ } catch (error) {
+ core.warning(`Failed to process asset ${asset.fileName}: ${error instanceof Error ? error.message : String(error)}`);
+ }
+ }
+ if (hasChanges) {
+ const commitMessage = `[skip-ci] Add ${uploadCount} asset(s)`;
+ await exec.exec(`git`, [`commit`, `-m`, commitMessage]);
+ if (isStaged) {
+ core.summary.addRaw("## Staged Asset Publication");
+ } else {
+ await exec.exec(`git push origin ${normalizedBranchName}`);
+ core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
+ core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
+ }
+ for (const asset of allUploadItems) {
+ if (asset.fileName && asset.sha && asset.size && asset.url) {
+ core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
+ }
+ }
+ core.summary.write();
+ } else {
+ core.info("No new assets to upload");
+ }
+ } catch (error) {
+ core.setFailed(`Failed to upload assets: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+ core.setOutput("upload_count", uploadCount.toString());
+ core.setOutput("branch_name", normalizedBranchName);
+ }
+ (async () => { await main(); })();
- name: Invalidate GitHub App token
if: always() && steps.app-token.outputs.token != ''
env:
diff --git a/.github/workflows/daily-firewall-report.lock.yml b/.github/workflows/daily-firewall-report.lock.yml
index 5ca4d84ac14..71e54979989 100644
--- a/.github/workflows/daily-firewall-report.lock.yml
+++ b/.github/workflows/daily-firewall-report.lock.yml
@@ -2502,7 +2502,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_discussion, missing_tool, noop, upload_assets
+ **Available tools**: create_discussion, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -8297,9 +8297,12 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
+ if (legacyUploadAssetsItems.length > 0) {
+ core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
+ }
+ const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
if (allUploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
diff --git a/.github/workflows/daily-firewall-report.md b/.github/workflows/daily-firewall-report.md
index d1dfc0005d3..2975ae9f5c0 100644
--- a/.github/workflows/daily-firewall-report.md
+++ b/.github/workflows/daily-firewall-report.md
@@ -16,7 +16,7 @@ tracker-id: daily-firewall-report
timeout-minutes: 45
safe-outputs:
- upload-assets:
+ upload-asset:
create-discussion:
expires: 3d
category: "audits"
@@ -330,4 +330,4 @@ Create a new GitHub discussion with:
## Expected Output
-A GitHub discussion in the "audits" category containing a comprehensive daily firewall analysis report.
+A GitHub discussion in the "audits" category containing a comprehensive daily firewall analysis report.
\ No newline at end of file
diff --git a/.github/workflows/daily-issues-report.lock.yml b/.github/workflows/daily-issues-report.lock.yml
index 31014292aa7..2241c5af6a9 100644
--- a/.github/workflows/daily-issues-report.lock.yml
+++ b/.github/workflows/daily-issues-report.lock.yml
@@ -3115,7 +3115,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: close_discussion, create_discussion, missing_tool, noop, upload_assets
+ **Available tools**: close_discussion, create_discussion, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -9171,9 +9171,12 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
+ if (legacyUploadAssetsItems.length > 0) {
+ core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
+ }
+ const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
if (allUploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
diff --git a/.github/workflows/daily-issues-report.md b/.github/workflows/daily-issues-report.md
index c3d430e0254..93b64fbaa97 100644
--- a/.github/workflows/daily-issues-report.md
+++ b/.github/workflows/daily-issues-report.md
@@ -14,7 +14,7 @@ tools:
github:
toolsets: [default, discussions]
safe-outputs:
- upload-assets:
+ upload-asset:
create-discussion:
expires: 3d
category: "General"
@@ -350,4 +350,4 @@ A successful run will:
- ✅ Create a new discussion with comprehensive report
- ✅ Include all required metrics and visualizations
-Begin your analysis now. Load the data, run the Python analysis, generate charts, and create the discussion report.
+Begin your analysis now. Load the data, run the Python analysis, generate charts, and create the discussion report.
\ No newline at end of file
diff --git a/.github/workflows/daily-multi-device-docs-tester.lock.yml b/.github/workflows/daily-multi-device-docs-tester.lock.yml
index c09b1ee3554..a40f2665ece 100644
--- a/.github/workflows/daily-multi-device-docs-tester.lock.yml
+++ b/.github/workflows/daily-multi-device-docs-tester.lock.yml
@@ -2173,7 +2173,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_issue, missing_tool, noop, upload_assets
+ **Available tools**: create_issue, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -7231,9 +7231,12 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
+ if (legacyUploadAssetsItems.length > 0) {
+ core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
+ }
+ const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
if (allUploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
diff --git a/.github/workflows/daily-multi-device-docs-tester.md b/.github/workflows/daily-multi-device-docs-tester.md
index 2c9623f7ff3..d04dd206582 100644
--- a/.github/workflows/daily-multi-device-docs-tester.md
+++ b/.github/workflows/daily-multi-device-docs-tester.md
@@ -34,7 +34,7 @@ tools:
- "pwd*" # Print working directory
- "cd*" # Change directory
safe-outputs:
- upload-assets:
+ upload-asset:
create-issue:
network:
@@ -136,4 +136,4 @@ rm -f /tmp/server.pid
## Summary
-Provide: total devices tested, test results (passed/failed/warnings), key findings, and link to issue (if created).
+Provide: total devices tested, test results (passed/failed/warnings), key findings, and link to issue (if created).
\ No newline at end of file
diff --git a/.github/workflows/daily-news.lock.yml b/.github/workflows/daily-news.lock.yml
index d685321a271..8621af76077 100644
--- a/.github/workflows/daily-news.lock.yml
+++ b/.github/workflows/daily-news.lock.yml
@@ -2825,7 +2825,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_discussion, missing_tool, noop, upload_assets
+ **Available tools**: create_discussion, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -8419,9 +8419,12 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
+ if (legacyUploadAssetsItems.length > 0) {
+ core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
+ }
+ const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
if (allUploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
diff --git a/.github/workflows/daily-news.md b/.github/workflows/daily-news.md
index 6be3809631f..bbb589fd812 100644
--- a/.github/workflows/daily-news.md
+++ b/.github/workflows/daily-news.md
@@ -27,7 +27,7 @@ network:
sandbox:
agent: awf # Firewall enabled (migrated from network.firewall)
safe-outputs:
- upload-assets:
+ upload-asset:
create-discussion:
expires: 3d
category: "daily-news"
diff --git a/.github/workflows/daily-performance-summary.lock.yml b/.github/workflows/daily-performance-summary.lock.yml
index eb18403b7a6..02376d1ded0 100644
--- a/.github/workflows/daily-performance-summary.lock.yml
+++ b/.github/workflows/daily-performance-summary.lock.yml
@@ -4327,7 +4327,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: close_discussion, create_discussion, missing_tool, noop, upload_assets
+ **Available tools**: close_discussion, create_discussion, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -10242,9 +10242,12 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
+ if (legacyUploadAssetsItems.length > 0) {
+ core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
+ }
+ const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
if (allUploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
diff --git a/.github/workflows/daily-performance-summary.md b/.github/workflows/daily-performance-summary.md
index 36728bfa6d9..7963a0db970 100644
--- a/.github/workflows/daily-performance-summary.md
+++ b/.github/workflows/daily-performance-summary.md
@@ -16,7 +16,7 @@ tools:
github:
toolsets: [default, discussions]
safe-outputs:
- upload-assets:
+ upload-asset:
create-discussion:
expires: 3d
category: "General"
@@ -477,4 +477,4 @@ This workflow uses safe-input tools imported from `shared/github-queries-safe-in
3. Tools are authenticated with `GITHUB_TOKEN` for GitHub API access
4. Call tools with parameters like: `github-pr-query with state: "all", limit: 1000, jq: "."`
-Begin your analysis now. **Use the safe-input tools** to gather data, run Python analysis, generate charts, and create the discussion report.
+Begin your analysis now. **Use the safe-input tools** to gather data, run Python analysis, generate charts, and create the discussion report.
\ No newline at end of file
diff --git a/.github/workflows/daily-repo-chronicle.lock.yml b/.github/workflows/daily-repo-chronicle.lock.yml
index e34aee6917a..cc32a5433d7 100644
--- a/.github/workflows/daily-repo-chronicle.lock.yml
+++ b/.github/workflows/daily-repo-chronicle.lock.yml
@@ -2747,7 +2747,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_discussion, missing_tool, noop, upload_assets
+ **Available tools**: create_discussion, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -8340,9 +8340,12 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
+ if (legacyUploadAssetsItems.length > 0) {
+ core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
+ }
+ const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
if (allUploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
diff --git a/.github/workflows/daily-repo-chronicle.md b/.github/workflows/daily-repo-chronicle.md
index feb1219fc78..bcd833becfe 100644
--- a/.github/workflows/daily-repo-chronicle.md
+++ b/.github/workflows/daily-repo-chronicle.md
@@ -30,7 +30,7 @@ tools:
- default
- discussions
safe-outputs:
- upload-assets:
+ upload-asset:
create-discussion:
expires: 3d
title-prefix: "📰 "
diff --git a/.github/workflows/deep-report.lock.yml b/.github/workflows/deep-report.lock.yml
index 429babad867..75a3cc18e08 100644
--- a/.github/workflows/deep-report.lock.yml
+++ b/.github/workflows/deep-report.lock.yml
@@ -2595,7 +2595,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_discussion, missing_tool, noop, upload_assets
+ **Available tools**: create_discussion, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -8339,9 +8339,12 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
+ if (legacyUploadAssetsItems.length > 0) {
+ core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
+ }
+ const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
if (allUploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
diff --git a/.github/workflows/deep-report.md b/.github/workflows/deep-report.md
index 3443fe24a24..2ee538db23f 100644
--- a/.github/workflows/deep-report.md
+++ b/.github/workflows/deep-report.md
@@ -26,7 +26,7 @@ network:
- node
safe-outputs:
- upload-assets:
+ upload-asset:
create-discussion:
category: "reports"
max: 1
@@ -325,4 +325,4 @@ List all reports and data sources analyzed:
- Be **objective** - report both positive and negative trends
- **Cite sources** for all major claims
-Create a new GitHub discussion titled "DeepReport Intelligence Briefing - [Today's Date]" in the "reports" category with your analysis.
+Create a new GitHub discussion titled "DeepReport Intelligence Briefing - [Today's Date]" in the "reports" category with your analysis.
\ No newline at end of file
diff --git a/.github/workflows/dev-hawk.lock.yml b/.github/workflows/dev-hawk.lock.yml
index c5057a15c84..20965099166 100644
--- a/.github/workflows/dev-hawk.lock.yml
+++ b/.github/workflows/dev-hawk.lock.yml
@@ -174,7 +174,7 @@ jobs:
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
+ uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
with:
cache: true
go-version-file: go.mod
diff --git a/.github/workflows/docs-noob-tester.lock.yml b/.github/workflows/docs-noob-tester.lock.yml
index b7d5d6e946e..e586c872738 100644
--- a/.github/workflows/docs-noob-tester.lock.yml
+++ b/.github/workflows/docs-noob-tester.lock.yml
@@ -2224,7 +2224,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_discussion, missing_tool, noop, upload_assets
+ **Available tools**: create_discussion, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -7807,9 +7807,12 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
+ if (legacyUploadAssetsItems.length > 0) {
+ core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
+ }
+ const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
if (allUploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
diff --git a/.github/workflows/docs-noob-tester.md b/.github/workflows/docs-noob-tester.md
index 3c0a2e689f4..578f79b5f78 100644
--- a/.github/workflows/docs-noob-tester.md
+++ b/.github/workflows/docs-noob-tester.md
@@ -16,7 +16,7 @@ tools:
bash:
- "*"
safe-outputs:
- upload-assets:
+ upload-asset:
create-discussion:
category: "General"
close-older-discussions: true
@@ -186,4 +186,4 @@ You've successfully completed this task if you:
- Navigated at least 5 key documentation pages
- Identified specific pain points with examples
- Provided actionable recommendations
-- Created a discussion with clear findings and screenshots
+- Created a discussion with clear findings and screenshots
\ No newline at end of file
diff --git a/.github/workflows/firewall-escape.lock.yml b/.github/workflows/firewall-escape.lock.yml
index 83c3e846026..85fe48f5989 100644
--- a/.github/workflows/firewall-escape.lock.yml
+++ b/.github/workflows/firewall-escape.lock.yml
@@ -2978,7 +2978,7 @@ jobs:
steps:
- name: Create issue on test failure
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |
await github.rest.issues.create({
diff --git a/.github/workflows/github-mcp-structural-analysis.lock.yml b/.github/workflows/github-mcp-structural-analysis.lock.yml
index 72803a61065..7ae97f350f3 100644
--- a/.github/workflows/github-mcp-structural-analysis.lock.yml
+++ b/.github/workflows/github-mcp-structural-analysis.lock.yml
@@ -147,6 +147,9 @@ jobs:
concurrency:
group: "gh-aw-claude-${{ github.workflow }}"
env:
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs
GH_AW_SAFE_OUTPUTS: /tmp/gh-aw/safeoutputs/outputs.jsonl
GH_AW_SAFE_OUTPUTS_CONFIG_PATH: /tmp/gh-aw/safeoutputs/config.json
@@ -172,7 +175,7 @@ jobs:
run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n"
- if: always()
name: Upload generated charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
with:
if-no-files-found: warn
name: data-charts
@@ -180,7 +183,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source files and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
with:
if-no-files-found: warn
name: python-source-and-data
@@ -330,7 +333,7 @@ jobs:
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
cat > /tmp/gh-aw/safeoutputs/config.json << 'EOF'
- {"create_discussion":{"max":1},"missing_tool":{"max":0},"noop":{"max":1}}
+ {"create_discussion":{"max":1},"missing_tool":{"max":0},"noop":{"max":1},"upload_asset":{"max":0}}
EOF
cat > /tmp/gh-aw/safeoutputs/tools.json << 'EOF'
[
@@ -360,6 +363,23 @@ jobs:
},
"name": "create_discussion"
},
+ {
+ "description": "Upload a file as a URL-addressable asset that can be referenced in issues, PRs, or comments. The file is stored on an orphaned git branch and returns a permanent URL. Use this for images, diagrams, or other files that need to be embedded in GitHub content. CONSTRAINTS: Maximum file size: 10240KB. Allowed file extensions: [.png .jpg .jpeg].",
+ "inputSchema": {
+ "additionalProperties": false,
+ "properties": {
+ "path": {
+ "description": "Absolute file path to upload (e.g., '/tmp/chart.png'). Must be under the workspace or /tmp directory. By default, only image files (.png, .jpg, .jpeg) are allowed; other file types require workflow configuration.",
+ "type": "string"
+ }
+ },
+ "required": [
+ "path"
+ ],
+ "type": "object"
+ },
+ "name": "upload_asset"
+ },
{
"description": "Report that a tool or capability needed to complete the task is not available. Use this when you cannot accomplish what was requested because the required functionality is missing or access is restricted.",
"inputSchema": {
@@ -465,6 +485,15 @@ jobs:
"maxLength": 65000
}
}
+ },
+ "upload_asset": {
+ "defaultMax": 10,
+ "fields": {
+ "path": {
+ "required": true,
+ "type": "string"
+ }
+ }
}
}
EOF
@@ -1805,6 +1834,9 @@ jobs:
env:
GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }}
+ GH_AW_ASSETS_BRANCH: ${{ env.GH_AW_ASSETS_BRANCH }}
+ GH_AW_ASSETS_MAX_SIZE_KB: ${{ env.GH_AW_ASSETS_MAX_SIZE_KB }}
+ GH_AW_ASSETS_ALLOWED_EXTS: ${{ env.GH_AW_ASSETS_ALLOWED_EXTS }}
run: |
mkdir -p /tmp/gh-aw/mcp-config
cat > /tmp/gh-aw/mcp-config/mcp-servers.json << EOF
@@ -2690,7 +2722,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_discussion, missing_tool, noop
+ **Available tools**: create_discussion, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -3060,6 +3092,9 @@ jobs:
DISABLE_BUG_COMMAND: 1
DISABLE_ERROR_REPORTING: 1
DISABLE_TELEMETRY: 1
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_MCP_CONFIG: /tmp/gh-aw/mcp-config/mcp-servers.json
GH_AW_MODEL_AGENT_CLAUDE: ${{ vars.GH_AW_MODEL_AGENT_CLAUDE || '' }}
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
@@ -5675,6 +5710,13 @@ jobs:
with:
name: cache-memory
path: /tmp/gh-aw/cache-memory
+ - name: Upload safe outputs assets
+ if: always()
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ if-no-files-found: ignore
- name: Validate agent logs for errors
if: always()
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
@@ -6690,10 +6732,13 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: read
+ contents: write
discussions: write
timeout-minutes: 15
env:
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "claude"
GH_AW_WORKFLOW_ID: "github-mcp-structural-analysis"
GH_AW_WORKFLOW_NAME: "GitHub MCP Structural Analysis"
@@ -7877,6 +7922,145 @@ jobs:
core.info(`Successfully created ${createdDiscussions.length} discussion(s)`);
}
(async () => { await main(); })();
+ - name: Upload Assets
+ id: upload_assets
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ env:
+ GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ with:
+ github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
+ script: |
+ globalThis.github = github;
+ globalThis.context = context;
+ globalThis.core = core;
+ globalThis.exec = exec;
+ globalThis.io = io;
+ const fs = require("fs");
+ const path = require("path");
+ const crypto = require("crypto");
+ const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ function normalizeBranchName(branchName) {
+ if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
+ return branchName;
+ }
+ let normalized = branchName.replace(/[^a-zA-Z0-9\-_/.]+/g, "-");
+ normalized = normalized.replace(/-+/g, "-");
+ normalized = normalized.replace(/^-+|-+$/g, "");
+ if (normalized.length > 128) {
+ normalized = normalized.substring(0, 128);
+ }
+ normalized = normalized.replace(/-+$/, "");
+ normalized = normalized.toLowerCase();
+ return normalized;
+ }
+ async function main() {
+ const isStaged = process.env.GH_AW_SAFE_OUTPUTS_STAGED === "true";
+ const branchName = process.env.GH_AW_ASSETS_BRANCH;
+ if (!branchName || typeof branchName !== "string") {
+ core.setFailed("GH_AW_ASSETS_BRANCH environment variable is required but not set");
+ return;
+ }
+ const normalizedBranchName = normalizeBranchName(branchName);
+ core.info(`Using assets branch: ${normalizedBranchName}`);
+ const result = loadAgentOutput();
+ if (!result.success) {
+ core.setOutput("upload_count", "0");
+ core.setOutput("branch_name", normalizedBranchName);
+ return;
+ }
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
+ if (legacyUploadAssetsItems.length > 0) {
+ core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
+ }
+ const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
+ if (allUploadItems.length === 0) {
+ core.info("No upload-asset items found in agent output");
+ core.setOutput("upload_count", "0");
+ core.setOutput("branch_name", normalizedBranchName);
+ return;
+ }
+ core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ let uploadCount = 0;
+ let hasChanges = false;
+ try {
+ try {
+ await exec.exec(`git rev-parse --verify origin/${normalizedBranchName}`);
+ await exec.exec(`git checkout -B ${normalizedBranchName} origin/${normalizedBranchName}`);
+ core.info(`Checked out existing branch from origin: ${normalizedBranchName}`);
+ } catch (originError) {
+ if (!normalizedBranchName.startsWith("assets/")) {
+ core.setFailed(
+ `Branch '${normalizedBranchName}' does not start with the required 'assets/' prefix. ` +
+ `Orphaned branches can only be automatically created under the 'assets/' prefix. ` +
+ `Please create the branch manually first, or use a branch name starting with 'assets/'.`
+ );
+ return;
+ }
+ core.info(`Creating new orphaned branch: ${normalizedBranchName}`);
+ await exec.exec(`git checkout --orphan ${normalizedBranchName}`);
+ await exec.exec(`git rm -rf .`);
+ await exec.exec(`git clean -fdx`);
+ }
+ for (const asset of allUploadItems) {
+ try {
+ const { fileName, sha, size, targetFileName } = asset;
+ if (!fileName || !sha || !targetFileName) {
+ core.error(`Invalid asset entry missing required fields: ${JSON.stringify(asset)}`);
+ continue;
+ }
+ const assetSourcePath = path.join("/tmp/gh-aw/safeoutputs/assets", fileName);
+ if (!fs.existsSync(assetSourcePath)) {
+ core.warning(`Asset file not found: ${assetSourcePath}`);
+ continue;
+ }
+ const fileContent = fs.readFileSync(assetSourcePath);
+ const computedSha = crypto.createHash("sha256").update(fileContent).digest("hex");
+ if (computedSha !== sha) {
+ core.warning(`SHA mismatch for ${fileName}: expected ${sha}, got ${computedSha}`);
+ continue;
+ }
+ if (fs.existsSync(targetFileName)) {
+ core.info(`Asset ${targetFileName} already exists, skipping`);
+ continue;
+ }
+ fs.copyFileSync(assetSourcePath, targetFileName);
+ await exec.exec(`git add "${targetFileName}"`);
+ uploadCount++;
+ hasChanges = true;
+ core.info(`Added asset: ${targetFileName} (${size} bytes)`);
+ } catch (error) {
+ core.warning(`Failed to process asset ${asset.fileName}: ${error instanceof Error ? error.message : String(error)}`);
+ }
+ }
+ if (hasChanges) {
+ const commitMessage = `[skip-ci] Add ${uploadCount} asset(s)`;
+ await exec.exec(`git`, [`commit`, `-m`, commitMessage]);
+ if (isStaged) {
+ core.summary.addRaw("## Staged Asset Publication");
+ } else {
+ await exec.exec(`git push origin ${normalizedBranchName}`);
+ core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
+ core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
+ }
+ for (const asset of allUploadItems) {
+ if (asset.fileName && asset.sha && asset.size && asset.url) {
+ core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
+ }
+ }
+ core.summary.write();
+ } else {
+ core.info("No new assets to upload");
+ }
+ } catch (error) {
+ core.setFailed(`Failed to upload assets: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+ core.setOutput("upload_count", uploadCount.toString());
+ core.setOutput("branch_name", normalizedBranchName);
+ }
+ (async () => { await main(); })();
update_cache_memory:
needs:
diff --git a/.github/workflows/go-pattern-detector.lock.yml b/.github/workflows/go-pattern-detector.lock.yml
index d993ca381af..778f79869f5 100644
--- a/.github/workflows/go-pattern-detector.lock.yml
+++ b/.github/workflows/go-pattern-detector.lock.yml
@@ -5380,7 +5380,7 @@ jobs:
found_patterns: ${{ steps.detect.outputs.found_patterns }}
steps:
- name: Checkout repository
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
with:
persist-credentials: false
- name: Install ast-grep
diff --git a/.github/workflows/intelligence.lock.yml b/.github/workflows/intelligence.lock.yml
index 664d8ac0dce..74ee92ba9cd 100644
--- a/.github/workflows/intelligence.lock.yml
+++ b/.github/workflows/intelligence.lock.yml
@@ -154,6 +154,9 @@ jobs:
concurrency:
group: "gh-aw-copilot-${{ github.workflow }}"
env:
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs
GH_AW_SAFE_OUTPUTS: /tmp/gh-aw/safeoutputs/outputs.jsonl
GH_AW_SAFE_OUTPUTS_CONFIG_PATH: /tmp/gh-aw/safeoutputs/config.json
@@ -179,7 +182,7 @@ jobs:
run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n"
- if: always()
name: Upload generated charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
with:
if-no-files-found: warn
name: data-charts
@@ -187,7 +190,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source files and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
with:
if-no-files-found: warn
name: python-source-and-data
@@ -369,7 +372,7 @@ jobs:
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
cat > /tmp/gh-aw/safeoutputs/config.json << 'EOF'
- {"create_issue":{"max":1},"missing_tool":{"max":0},"noop":{"max":1}}
+ {"create_issue":{"max":1},"missing_tool":{"max":0},"noop":{"max":1},"upload_asset":{"max":0}}
EOF
cat > /tmp/gh-aw/safeoutputs/tools.json << 'EOF'
[
@@ -413,6 +416,23 @@ jobs:
},
"name": "create_issue"
},
+ {
+ "description": "Upload a file as a URL-addressable asset that can be referenced in issues, PRs, or comments. The file is stored on an orphaned git branch and returns a permanent URL. Use this for images, diagrams, or other files that need to be embedded in GitHub content. CONSTRAINTS: Maximum file size: 10240KB. Allowed file extensions: [.png .jpg .jpeg].",
+ "inputSchema": {
+ "additionalProperties": false,
+ "properties": {
+ "path": {
+ "description": "Absolute file path to upload (e.g., '/tmp/chart.png'). Must be under the workspace or /tmp directory. By default, only image files (.png, .jpg, .jpeg) are allowed; other file types require workflow configuration.",
+ "type": "string"
+ }
+ },
+ "required": [
+ "path"
+ ],
+ "type": "object"
+ },
+ "name": "upload_asset"
+ },
{
"description": "Report that a tool or capability needed to complete the task is not available. Use this when you cannot accomplish what was requested because the required functionality is missing or access is restricted.",
"inputSchema": {
@@ -525,6 +545,15 @@ jobs:
"maxLength": 65000
}
}
+ },
+ "upload_asset": {
+ "defaultMax": 10,
+ "fields": {
+ "path": {
+ "required": true,
+ "type": "string"
+ }
+ }
}
}
EOF
@@ -1865,6 +1894,9 @@ jobs:
env:
GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }}
+ GH_AW_ASSETS_BRANCH: ${{ env.GH_AW_ASSETS_BRANCH }}
+ GH_AW_ASSETS_MAX_SIZE_KB: ${{ env.GH_AW_ASSETS_MAX_SIZE_KB }}
+ GH_AW_ASSETS_ALLOWED_EXTS: ${{ env.GH_AW_ASSETS_ALLOWED_EXTS }}
run: |
mkdir -p /tmp/gh-aw/mcp-config
mkdir -p /home/runner/.copilot
@@ -3173,7 +3205,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_issue, missing_tool, noop
+ **Available tools**: create_issue, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -3469,6 +3501,9 @@ jobs:
env:
COPILOT_AGENT_RUNNER_TYPE: STANDALONE
COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }}
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_MCP_CONFIG: /home/runner/.copilot/mcp-config.json
GH_AW_MODEL_AGENT_COPILOT: ${{ vars.GH_AW_MODEL_AGENT_COPILOT || '' }}
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
@@ -6515,6 +6550,13 @@ jobs:
with:
name: cache-memory
path: /tmp/gh-aw/cache-memory
+ - name: Upload safe outputs assets
+ if: always()
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ if-no-files-found: ignore
- name: Validate agent logs for errors
if: always()
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
@@ -7719,10 +7761,13 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: read
+ contents: write
issues: write
timeout-minutes: 15
env:
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_WORKFLOW_ID: "intelligence"
GH_AW_WORKFLOW_NAME: "Campaign Intelligence System"
@@ -8678,6 +8723,145 @@ jobs:
(async () => {
await main();
})();
+ - name: Upload Assets
+ id: upload_assets
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ env:
+ GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ with:
+ github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
+ script: |
+ globalThis.github = github;
+ globalThis.context = context;
+ globalThis.core = core;
+ globalThis.exec = exec;
+ globalThis.io = io;
+ const fs = require("fs");
+ const path = require("path");
+ const crypto = require("crypto");
+ const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ function normalizeBranchName(branchName) {
+ if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
+ return branchName;
+ }
+ let normalized = branchName.replace(/[^a-zA-Z0-9\-_/.]+/g, "-");
+ normalized = normalized.replace(/-+/g, "-");
+ normalized = normalized.replace(/^-+|-+$/g, "");
+ if (normalized.length > 128) {
+ normalized = normalized.substring(0, 128);
+ }
+ normalized = normalized.replace(/-+$/, "");
+ normalized = normalized.toLowerCase();
+ return normalized;
+ }
+ async function main() {
+ const isStaged = process.env.GH_AW_SAFE_OUTPUTS_STAGED === "true";
+ const branchName = process.env.GH_AW_ASSETS_BRANCH;
+ if (!branchName || typeof branchName !== "string") {
+ core.setFailed("GH_AW_ASSETS_BRANCH environment variable is required but not set");
+ return;
+ }
+ const normalizedBranchName = normalizeBranchName(branchName);
+ core.info(`Using assets branch: ${normalizedBranchName}`);
+ const result = loadAgentOutput();
+ if (!result.success) {
+ core.setOutput("upload_count", "0");
+ core.setOutput("branch_name", normalizedBranchName);
+ return;
+ }
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
+ if (legacyUploadAssetsItems.length > 0) {
+ core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
+ }
+ const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
+ if (allUploadItems.length === 0) {
+ core.info("No upload-asset items found in agent output");
+ core.setOutput("upload_count", "0");
+ core.setOutput("branch_name", normalizedBranchName);
+ return;
+ }
+ core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ let uploadCount = 0;
+ let hasChanges = false;
+ try {
+ try {
+ await exec.exec(`git rev-parse --verify origin/${normalizedBranchName}`);
+ await exec.exec(`git checkout -B ${normalizedBranchName} origin/${normalizedBranchName}`);
+ core.info(`Checked out existing branch from origin: ${normalizedBranchName}`);
+ } catch (originError) {
+ if (!normalizedBranchName.startsWith("assets/")) {
+ core.setFailed(
+ `Branch '${normalizedBranchName}' does not start with the required 'assets/' prefix. ` +
+ `Orphaned branches can only be automatically created under the 'assets/' prefix. ` +
+ `Please create the branch manually first, or use a branch name starting with 'assets/'.`
+ );
+ return;
+ }
+ core.info(`Creating new orphaned branch: ${normalizedBranchName}`);
+ await exec.exec(`git checkout --orphan ${normalizedBranchName}`);
+ await exec.exec(`git rm -rf .`);
+ await exec.exec(`git clean -fdx`);
+ }
+ for (const asset of allUploadItems) {
+ try {
+ const { fileName, sha, size, targetFileName } = asset;
+ if (!fileName || !sha || !targetFileName) {
+ core.error(`Invalid asset entry missing required fields: ${JSON.stringify(asset)}`);
+ continue;
+ }
+ const assetSourcePath = path.join("/tmp/gh-aw/safeoutputs/assets", fileName);
+ if (!fs.existsSync(assetSourcePath)) {
+ core.warning(`Asset file not found: ${assetSourcePath}`);
+ continue;
+ }
+ const fileContent = fs.readFileSync(assetSourcePath);
+ const computedSha = crypto.createHash("sha256").update(fileContent).digest("hex");
+ if (computedSha !== sha) {
+ core.warning(`SHA mismatch for ${fileName}: expected ${sha}, got ${computedSha}`);
+ continue;
+ }
+ if (fs.existsSync(targetFileName)) {
+ core.info(`Asset ${targetFileName} already exists, skipping`);
+ continue;
+ }
+ fs.copyFileSync(assetSourcePath, targetFileName);
+ await exec.exec(`git add "${targetFileName}"`);
+ uploadCount++;
+ hasChanges = true;
+ core.info(`Added asset: ${targetFileName} (${size} bytes)`);
+ } catch (error) {
+ core.warning(`Failed to process asset ${asset.fileName}: ${error instanceof Error ? error.message : String(error)}`);
+ }
+ }
+ if (hasChanges) {
+ const commitMessage = `[skip-ci] Add ${uploadCount} asset(s)`;
+ await exec.exec(`git`, [`commit`, `-m`, commitMessage]);
+ if (isStaged) {
+ core.summary.addRaw("## Staged Asset Publication");
+ } else {
+ await exec.exec(`git push origin ${normalizedBranchName}`);
+ core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
+ core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
+ }
+ for (const asset of allUploadItems) {
+ if (asset.fileName && asset.sha && asset.size && asset.url) {
+ core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
+ }
+ }
+ core.summary.write();
+ } else {
+ core.info("No new assets to upload");
+ }
+ } catch (error) {
+ core.setFailed(`Failed to upload assets: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+ core.setOutput("upload_count", uploadCount.toString());
+ core.setOutput("branch_name", normalizedBranchName);
+ }
+ (async () => { await main(); })();
update_cache_memory:
needs:
diff --git a/.github/workflows/issue-classifier.lock.yml b/.github/workflows/issue-classifier.lock.yml
index f86933a2ed9..d578b06cb3f 100644
--- a/.github/workflows/issue-classifier.lock.yml
+++ b/.github/workflows/issue-classifier.lock.yml
@@ -2995,7 +2995,7 @@ jobs:
path: /tmp/gh-aw/aw_info.json
if-no-files-found: warn
- name: Run AI Inference
- uses: actions/ai-inference@334892bb203895caaed82ec52d23c1ed9385151e # v2.0.4
+ uses: actions/ai-inference@334892bb203895caaed82ec52d23c1ed9385151e # v2
env:
GH_AW_MCP_CONFIG: /tmp/gh-aw/mcp-config/mcp-servers.json
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
diff --git a/.github/workflows/issue-monster.lock.yml b/.github/workflows/issue-monster.lock.yml
index d6027623284..96ab016ee7f 100644
--- a/.github/workflows/issue-monster.lock.yml
+++ b/.github/workflows/issue-monster.lock.yml
@@ -8449,7 +8449,7 @@ jobs:
steps:
- name: Search for candidate issues
id: search
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |
const { owner, repo } = context.repo;
diff --git a/.github/workflows/mcp-inspector.lock.yml b/.github/workflows/mcp-inspector.lock.yml
index cfcd912f635..a0145b73f0a 100644
--- a/.github/workflows/mcp-inspector.lock.yml
+++ b/.github/workflows/mcp-inspector.lock.yml
@@ -6944,7 +6944,7 @@ jobs:
find "/tmp/gh-aw/safe-jobs/" -type f -print
echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safe-jobs/agent_output.json" >> "$GITHUB_ENV"
- name: Add comment to Notion page
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
NOTION_API_TOKEN: ${{ secrets.NOTION_API_TOKEN }}
NOTION_PAGE_ID: ${{ vars.NOTION_PAGE_ID }}
@@ -7074,7 +7074,7 @@ jobs:
find "/tmp/gh-aw/safe-jobs/" -type f -print
echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safe-jobs/agent_output.json" >> "$GITHUB_ENV"
- name: Post message to Slack
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
SLACK_CHANNEL_ID: ${{ env.GH_AW_SLACK_CHANNEL_ID }}
diff --git a/.github/workflows/notion-issue-summary.lock.yml b/.github/workflows/notion-issue-summary.lock.yml
index ebbdfd85f67..8f003a4ca32 100644
--- a/.github/workflows/notion-issue-summary.lock.yml
+++ b/.github/workflows/notion-issue-summary.lock.yml
@@ -6340,7 +6340,7 @@ jobs:
find "/tmp/gh-aw/safe-jobs/" -type f -print
echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safe-jobs/agent_output.json" >> "$GITHUB_ENV"
- name: Add comment to Notion page
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
NOTION_API_TOKEN: ${{ secrets.NOTION_API_TOKEN }}
NOTION_PAGE_ID: ${{ vars.NOTION_PAGE_ID }}
diff --git a/.github/workflows/org-health-report.lock.yml b/.github/workflows/org-health-report.lock.yml
index 6f8a5b19c6c..c9068584815 100644
--- a/.github/workflows/org-health-report.lock.yml
+++ b/.github/workflows/org-health-report.lock.yml
@@ -2862,7 +2862,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_discussion, missing_tool, noop, upload_assets
+ **Available tools**: create_discussion, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -8450,9 +8450,12 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
+ if (legacyUploadAssetsItems.length > 0) {
+ core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
+ }
+ const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
if (allUploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
diff --git a/.github/workflows/org-health-report.md b/.github/workflows/org-health-report.md
index 48d8031d93f..bd4d6506daf 100644
--- a/.github/workflows/org-health-report.md
+++ b/.github/workflows/org-health-report.md
@@ -25,7 +25,7 @@ safe-outputs:
category: "reports"
max: 1
close-older-discussions: true
- upload-assets:
+ upload-asset:
timeout-minutes: 60
strict: true
network:
@@ -485,4 +485,4 @@ A successful health report:
- ✅ Publishes report as GitHub Discussion
- ✅ Completes within 60 minute timeout
-Begin the organization health report analysis now. Follow the phases in order, add appropriate delays, and generate a comprehensive report for maintainers.
+Begin the organization health report analysis now. Follow the phases in order, add appropriate delays, and generate a comprehensive report for maintainers.
\ No newline at end of file
diff --git a/.github/workflows/poem-bot.lock.yml b/.github/workflows/poem-bot.lock.yml
index e79bd0dac7b..2c91b7c7669 100644
--- a/.github/workflows/poem-bot.lock.yml
+++ b/.github/workflows/poem-bot.lock.yml
@@ -3367,7 +3367,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: add_comment, add_labels, close_pull_request, create_agent_task, create_discussion, create_issue, create_pull_request, create_pull_request_review_comment, link_sub_issue, missing_tool, noop, push_to_pull_request_branch, update_issue, upload_assets
+ **Available tools**: add_comment, add_labels, close_pull_request, create_agent_task, create_discussion, create_issue, create_pull_request, create_pull_request_review_comment, link_sub_issue, missing_tool, noop, push_to_pull_request_branch, update_issue, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -13306,9 +13306,12 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
+ if (legacyUploadAssetsItems.length > 0) {
+ core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
+ }
+ const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
if (allUploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
diff --git a/.github/workflows/poem-bot.md b/.github/workflows/poem-bot.md
index 5f19bc31b9c..9ef6cc10ec0 100644
--- a/.github/workflows/poem-bot.md
+++ b/.github/workflows/poem-bot.md
@@ -119,7 +119,7 @@ safe-outputs:
base: main
# Upload assets
- upload-assets:
+ upload-asset:
# Missing tool reporting
missing-tool:
@@ -180,4 +180,4 @@ Use the safe-outputs capabilities to:
## Begin Your Poetic Journey!
-Examine the current context and create your masterpiece! Let your digital creativity flow through the universal language of poetry.
+Examine the current context and create your masterpiece! Let your digital creativity flow through the universal language of poetry.
\ No newline at end of file
diff --git a/.github/workflows/portfolio-analyst.lock.yml b/.github/workflows/portfolio-analyst.lock.yml
index 9d71bd8dd56..06d3121a48d 100644
--- a/.github/workflows/portfolio-analyst.lock.yml
+++ b/.github/workflows/portfolio-analyst.lock.yml
@@ -2852,7 +2852,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_discussion, missing_tool, noop, upload_assets
+ **Available tools**: create_discussion, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -8445,9 +8445,12 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload_assets" || item.type === "upload-assets");
+ if (legacyUploadAssetsItems.length > 0) {
+ core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
+ }
+ const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
if (allUploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
diff --git a/.github/workflows/portfolio-analyst.md b/.github/workflows/portfolio-analyst.md
index d1f042d09af..6a3b624e9cc 100644
--- a/.github/workflows/portfolio-analyst.md
+++ b/.github/workflows/portfolio-analyst.md
@@ -28,7 +28,7 @@ safe-outputs:
title-prefix: "[portfolio] "
category: "Audits"
close-older-discussions: true
- upload-assets:
+ upload-asset:
timeout-minutes: 20
imports:
- shared/mcp/gh-aw.md
@@ -573,4 +573,4 @@ print("✅ All charts generated")
✅ Healthy workflows are briefly mentioned but not analyzed
✅ All dollar amounts are from actual workflow execution data
-Begin your analysis now. **FIRST**: Generate all 4 required charts from `/tmp/portfolio-logs/summary.json` and upload them as assets. **THEN**: Create the dashboard-style discussion with embedded chart URLs. Read from the pre-downloaded JSON file at `/tmp/portfolio-logs/summary.json` to get real execution data for all workflows. This file contains everything you need: summary metrics and individual run data. DO NOT attempt to call `gh aw logs` or any `gh` commands - they will not work. Move fast, focus on high-impact issues, deliver actionable recommendations based on actual costs, and make the report visual and scannable.
+Begin your analysis now. **FIRST**: Generate all 4 required charts from `/tmp/portfolio-logs/summary.json` and upload them as assets. **THEN**: Create the dashboard-style discussion with embedded chart URLs. Read from the pre-downloaded JSON file at `/tmp/portfolio-logs/summary.json` to get real execution data for all workflows. This file contains everything you need: summary metrics and individual run data. DO NOT attempt to call `gh aw logs` or any `gh` commands - they will not work. Move fast, focus on high-impact issues, deliver actionable recommendations based on actual costs, and make the report visual and scannable.
\ No newline at end of file
diff --git a/.github/workflows/prompt-clustering-analysis.lock.yml b/.github/workflows/prompt-clustering-analysis.lock.yml
index 2fb324a7eac..cc9955aefd4 100644
--- a/.github/workflows/prompt-clustering-analysis.lock.yml
+++ b/.github/workflows/prompt-clustering-analysis.lock.yml
@@ -173,7 +173,7 @@ jobs:
- name: Set up jq utilities directory
run: "mkdir -p /tmp/gh-aw\ncat > /tmp/gh-aw/jqschema.sh << 'EOF'\n#!/usr/bin/env bash\n# jqschema.sh\njq -c '\ndef walk(f):\n . as $in |\n if type == \"object\" then\n reduce keys[] as $k ({}; . + {($k): ($in[$k] | walk(f))})\n elif type == \"array\" then\n if length == 0 then [] else [.[0] | walk(f)] end\n else\n type\n end;\nwalk(.)\n'\nEOF\nchmod +x /tmp/gh-aw/jqschema.sh"
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
+ uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
with:
cache: true
go-version-file: go.mod
@@ -198,7 +198,7 @@ jobs:
pip install --user --quiet numpy pandas matplotlib seaborn scipy
- if: always()
name: Upload charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
with:
if-no-files-found: warn
name: trending-charts
@@ -206,7 +206,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
with:
if-no-files-found: warn
name: trending-source-and-data
diff --git a/.github/workflows/python-data-charts.lock.yml b/.github/workflows/python-data-charts.lock.yml
index bf5e583f1a7..9db54f824a3 100644
--- a/.github/workflows/python-data-charts.lock.yml
+++ b/.github/workflows/python-data-charts.lock.yml
@@ -3152,7 +3152,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_discussion, missing_tool, noop, upload_assets
+ **Available tools**: create_discussion, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -8742,9 +8742,12 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload_assets" || item.type === "upload-assets");
+ if (legacyUploadAssetsItems.length > 0) {
+ core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
+ }
+ const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
if (allUploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
diff --git a/.github/workflows/python-data-charts.md b/.github/workflows/python-data-charts.md
index 26f2d1a93fe..cea14e444fc 100644
--- a/.github/workflows/python-data-charts.md
+++ b/.github/workflows/python-data-charts.md
@@ -14,7 +14,7 @@ tools:
imports:
- shared/charts-with-trending.md
safe-outputs:
- upload-assets:
+ upload-asset:
create-discussion:
category: "artifacts"
max: 1
@@ -168,5 +168,4 @@ This report contains data visualizations and trending analysis generated using P
- ✅ **High Quality**: Use DPI 300, clear labels, and seaborn styling
- ✅ **Document Cache**: Report on cache status and trending capabilities
-Refer to the Charts with Trending Guide (imported above) for complete examples, trending patterns, cache-memory integration, and best practices.
-
+Refer to the Charts with Trending Guide (imported above) for complete examples, trending patterns, cache-memory integration, and best practices.
\ No newline at end of file
diff --git a/.github/workflows/release.lock.yml b/.github/workflows/release.lock.yml
index 0737a63983f..e203e68d272 100644
--- a/.github/workflows/release.lock.yml
+++ b/.github/workflows/release.lock.yml
@@ -6489,28 +6489,28 @@ jobs:
steps:
- name: Checkout repository
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
+ uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
with:
cache: false
go-version-file: go.mod
- name: Download Go modules
run: go mod download
- name: Generate SBOM (SPDX format)
- uses: anchore/sbom-action@43a17d6e7add2b5535efe4dcae9952337c479a93 # v0.20.11
+ uses: anchore/sbom-action@43a17d6e7add2b5535efe4dcae9952337c479a93 # v0.20.11
with:
artifact-name: sbom.spdx.json
format: spdx-json
output-file: sbom.spdx.json
- name: Generate SBOM (CycloneDX format)
- uses: anchore/sbom-action@43a17d6e7add2b5535efe4dcae9952337c479a93 # v0.20.11
+ uses: anchore/sbom-action@43a17d6e7add2b5535efe4dcae9952337c479a93 # v0.20.11
with:
artifact-name: sbom.cdx.json
format: cyclonedx-json
output-file: sbom.cdx.json
- name: Upload SBOM artifacts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
with:
name: sbom-artifacts
path: |
@@ -6689,12 +6689,12 @@ jobs:
release_tag: ${{ steps.get_release.outputs.release_tag }}
steps:
- name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
with:
fetch-depth: 0
persist-credentials: false
- name: Release with gh-extension-precompile
- uses: cli/gh-extension-precompile@9e2237c30f869ad3bcaed6a4be2cd43564dd421b # v2.1.0
+ uses: cli/gh-extension-precompile@9e2237c30f869ad3bcaed6a4be2cd43564dd421b # v2
with:
build_script_override: scripts/build-release.sh
go_version_file: go.mod
diff --git a/.github/workflows/safe-output-health.lock.yml b/.github/workflows/safe-output-health.lock.yml
index b7cc22f6fe6..d294d1f9053 100644
--- a/.github/workflows/safe-output-health.lock.yml
+++ b/.github/workflows/safe-output-health.lock.yml
@@ -167,7 +167,7 @@ jobs:
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
+ uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
with:
cache: true
go-version-file: go.mod
diff --git a/.github/workflows/shared/python-dataviz.md b/.github/workflows/shared/python-dataviz.md
index a7d8a98b73f..e6dfc854730 100644
--- a/.github/workflows/shared/python-dataviz.md
+++ b/.github/workflows/shared/python-dataviz.md
@@ -26,7 +26,7 @@ network:
- python
safe-outputs:
- upload-assets:
+ upload-asset:
steps:
- name: Setup Python environment
diff --git a/.github/workflows/slide-deck-maintainer.lock.yml b/.github/workflows/slide-deck-maintainer.lock.yml
index 0d5ef45097b..36a4eb67f44 100644
--- a/.github/workflows/slide-deck-maintainer.lock.yml
+++ b/.github/workflows/slide-deck-maintainer.lock.yml
@@ -167,7 +167,7 @@ jobs:
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
- name: Set up Node.js
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
+ uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
with:
cache: npm
cache-dependency-path: docs/package-lock.json
diff --git a/.github/workflows/smoke-copilot-playwright.lock.yml b/.github/workflows/smoke-copilot-playwright.lock.yml
index fb2889dbc16..eec59eb6873 100644
--- a/.github/workflows/smoke-copilot-playwright.lock.yml
+++ b/.github/workflows/smoke-copilot-playwright.lock.yml
@@ -7624,7 +7624,7 @@ jobs:
run: "echo \"📋 Collecting Playwright MCP logs...\"\n\n# Create logs directory\nmkdir -p /tmp/gh-aw/playwright-debug-logs\n\n# Copy any playwright logs from the MCP logs directory\nif [ -d \"/tmp/gh-aw/mcp-logs/playwright\" ]; then\n echo \"Found Playwright MCP logs directory\"\n cp -r /tmp/gh-aw/mcp-logs/playwright/* /tmp/gh-aw/playwright-debug-logs/ 2>/dev/null || true\n ls -la /tmp/gh-aw/playwright-debug-logs/\nelse\n echo \"No Playwright MCP logs directory found at /tmp/gh-aw/mcp-logs/playwright\"\nfi\n\n# List all trace files if any\necho \"Looking for trace files...\"\nfind /tmp -name \"*.zip\" -o -name \"trace*\" 2>/dev/null | head -20 || true\n\n# Show docker container logs if any containers are still running\necho \"Checking for running Docker containers...\"\ndocker ps -a --format \"table {{.Names}}\\t{{.Status}}\\t{{.Image}}\" 2>/dev/null || true\n"
- if: always()
name: Upload Playwright Debug Logs
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
with:
if-no-files-found: ignore
name: playwright-debug-logs-${{ github.run_id }}
diff --git a/.github/workflows/smoke-detector.lock.yml b/.github/workflows/smoke-detector.lock.yml
index 82f4620e81d..4cfccf299c4 100644
--- a/.github/workflows/smoke-detector.lock.yml
+++ b/.github/workflows/smoke-detector.lock.yml
@@ -595,7 +595,7 @@ jobs:
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
+ uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
with:
cache: true
go-version-file: go.mod
diff --git a/.github/workflows/stale-repo-identifier.lock.yml b/.github/workflows/stale-repo-identifier.lock.yml
index 3ca0d732add..df5b1edcc05 100644
--- a/.github/workflows/stale-repo-identifier.lock.yml
+++ b/.github/workflows/stale-repo-identifier.lock.yml
@@ -2885,7 +2885,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_issue, missing_tool, noop, upload_assets
+ **Available tools**: create_issue, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -8252,9 +8252,12 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload_assets" || item.type === "upload-assets");
+ if (legacyUploadAssetsItems.length > 0) {
+ core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
+ }
+ const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
if (allUploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
diff --git a/.github/workflows/stale-repo-identifier.md b/.github/workflows/stale-repo-identifier.md
index aa73bb95465..ff6cc1251b7 100644
--- a/.github/workflows/stale-repo-identifier.md
+++ b/.github/workflows/stale-repo-identifier.md
@@ -36,7 +36,7 @@ safe-outputs:
title-prefix: "[Stale Repository] "
labels: [stale-repository, automated-analysis]
max: 10
- upload-assets:
+ upload-asset:
messages:
footer: "> 🔍 *Analysis by [{workflow_name}]({run_url})*"
run-started: "🔍 Stale Repository Identifier starting! [{workflow_name}]({run_url}) is analyzing repository activity..."
@@ -352,4 +352,4 @@ To avoid GitHub API rate limits:
- Create GitHub issues for repositories needing attention (max 10)
- Print summary statistics to stdout
-- Be clear and actionable in recommendations
+- Be clear and actionable in recommendations
\ No newline at end of file
diff --git a/.github/workflows/static-analysis-report.lock.yml b/.github/workflows/static-analysis-report.lock.yml
index e69cb06ecc1..4ece510b61b 100644
--- a/.github/workflows/static-analysis-report.lock.yml
+++ b/.github/workflows/static-analysis-report.lock.yml
@@ -166,7 +166,7 @@ jobs:
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
+ uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
with:
cache: true
go-version-file: go.mod
diff --git a/.github/workflows/super-linter.lock.yml b/.github/workflows/super-linter.lock.yml
index fa2948866cc..8e569416580 100644
--- a/.github/workflows/super-linter.lock.yml
+++ b/.github/workflows/super-linter.lock.yml
@@ -166,7 +166,7 @@ jobs:
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
- name: Download super-linter log
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6
with:
name: super-linter-log
path: /tmp/gh-aw/
@@ -7536,13 +7536,13 @@ jobs:
steps:
- name: Checkout Code
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
with:
fetch-depth: 0
persist-credentials: false
- name: Super-linter
id: super-linter
- uses: super-linter/super-linter@47984f49b4e87383eed97890fe2dca6063bbd9c3 # v8.3.1
+ uses: super-linter/super-linter@47984f49b4e87383eed97890fe2dca6063bbd9c3 # v8
env:
CREATE_LOG_FILE: "true"
DEFAULT_BRANCH: main
@@ -7564,7 +7564,7 @@ jobs:
fi
- name: Upload super-linter log
if: always()
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
with:
name: super-linter-log
path: super-linter.log
diff --git a/.github/workflows/technical-doc-writer.lock.yml b/.github/workflows/technical-doc-writer.lock.yml
index 7717aae3152..197e950bad3 100644
--- a/.github/workflows/technical-doc-writer.lock.yml
+++ b/.github/workflows/technical-doc-writer.lock.yml
@@ -2546,7 +2546,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: add_comment, create_pull_request, missing_tool, noop, upload_assets
+ **Available tools**: add_comment, create_pull_request, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -8774,9 +8774,12 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload_assets" || item.type === "upload-assets");
+ if (legacyUploadAssetsItems.length > 0) {
+ core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
+ }
+ const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
if (allUploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
diff --git a/.github/workflows/technical-doc-writer.md b/.github/workflows/technical-doc-writer.md
index 3ad96a21270..1843a973ed2 100644
--- a/.github/workflows/technical-doc-writer.md
+++ b/.github/workflows/technical-doc-writer.md
@@ -34,7 +34,7 @@ safe-outputs:
labels: [documentation]
reviewers: copilot
draft: false
- upload-assets:
+ upload-asset:
messages:
footer: "> 📝 *Documentation by [{workflow_name}]({run_url})*"
run-started: "✍️ The Technical Writer begins! [{workflow_name}]({run_url}) is documenting this {event_type}..."
@@ -106,4 +106,4 @@ When reviewing documentation for the specified topic in the **docs/** folder, ap
Keep your feedback specific, actionable, and empathetic. Focus on the most impactful improvements for the topic: "${{ github.event.inputs.topic }}"
-You have access to cache-memory for persistent storage across runs, which you can use to track documentation patterns and improvement suggestions.
+You have access to cache-memory for persistent storage across runs, which you can use to track documentation patterns and improvement suggestions.
\ No newline at end of file
diff --git a/.github/workflows/tidy.lock.yml b/.github/workflows/tidy.lock.yml
index 849c95c24e4..09887cdcb0b 100644
--- a/.github/workflows/tidy.lock.yml
+++ b/.github/workflows/tidy.lock.yml
@@ -577,13 +577,13 @@ jobs:
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
- name: Set up Node.js
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
+ uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
with:
cache: npm
cache-dependency-path: pkg/workflow/js/package-lock.json
node-version: "24"
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
+ uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
with:
cache: true
go-version-file: go.mod
diff --git a/.github/workflows/unbloat-docs.lock.yml b/.github/workflows/unbloat-docs.lock.yml
index 1f51305800c..b2e44ceb8d5 100644
--- a/.github/workflows/unbloat-docs.lock.yml
+++ b/.github/workflows/unbloat-docs.lock.yml
@@ -2830,7 +2830,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: add_comment, create_pull_request, missing_tool, noop, upload_assets
+ **Available tools**: add_comment, create_pull_request, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -8981,9 +8981,12 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload_assets" || item.type === "upload-assets");
+ if (legacyUploadAssetsItems.length > 0) {
+ core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
+ }
+ const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
if (allUploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
diff --git a/.github/workflows/unbloat-docs.md b/.github/workflows/unbloat-docs.md
index c7060604b8c..897ec0f7feb 100644
--- a/.github/workflows/unbloat-docs.md
+++ b/.github/workflows/unbloat-docs.md
@@ -69,7 +69,7 @@ safe-outputs:
draft: true
add-comment:
max: 1
- upload-assets:
+ upload-asset:
messages:
footer: "> 🗜️ *Compressed by [{workflow_name}]({run_url})*"
run-started: "📦 Time to slim down! [{workflow_name}]({run_url}) is trimming the excess from this {event_type}..."
@@ -338,4 +338,4 @@ A successful run:
- ✅ Includes HD screenshots (1920x1080) of the modified documentation page(s) in the Astro Starlight website
- ✅ Reports any blocked domains for CSS/fonts (if encountered)
-Begin by scanning the docs directory and selecting the best candidate for improvement!
+Begin by scanning the docs directory and selecting the best candidate for improvement!
\ No newline at end of file
diff --git a/.github/workflows/weekly-issue-summary.lock.yml b/.github/workflows/weekly-issue-summary.lock.yml
index 4037ead1aa7..0f2072e27ae 100644
--- a/.github/workflows/weekly-issue-summary.lock.yml
+++ b/.github/workflows/weekly-issue-summary.lock.yml
@@ -2655,7 +2655,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_discussion, missing_tool, noop, upload_assets
+ **Available tools**: create_discussion, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -8248,9 +8248,12 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload_assets" || item.type === "upload-assets");
+ if (legacyUploadAssetsItems.length > 0) {
+ core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
+ }
+ const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
if (allUploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
diff --git a/.github/workflows/weekly-issue-summary.md b/.github/workflows/weekly-issue-summary.md
index d1187afa0ce..2e10242cc6d 100644
--- a/.github/workflows/weekly-issue-summary.md
+++ b/.github/workflows/weekly-issue-summary.md
@@ -25,7 +25,7 @@ tools:
toolsets:
- issues
safe-outputs:
- upload-assets:
+ upload-asset:
create-discussion:
title-prefix: "[Weekly Summary] "
category: "Audits"
diff --git a/docs/slides/index.md b/docs/slides/index.md
index 5cabb34bbdf..b5ee7536ec2 100644
--- a/docs/slides/index.md
+++ b/docs/slides/index.md
@@ -320,7 +320,7 @@ tools:
playwright: # Headless browser automation
safe-outputs:
create-issue:
- upload-assets: # Attach screenshots to artifacts
+ upload-asset: # Attach screenshots to artifacts
---
Test the web application:
1. Navigate to the deployed preview URL
diff --git a/docs/src/content/docs/reference/frontmatter-full.md b/docs/src/content/docs/reference/frontmatter-full.md
index edcd93074bd..5fc69782b9d 100644
--- a/docs/src/content/docs/reference/frontmatter-full.md
+++ b/docs/src/content/docs/reference/frontmatter-full.md
@@ -2629,7 +2629,7 @@ safe-outputs:
# This field supports multiple formats (oneOf):
# Option 1: Configuration for publishing assets to an orphaned git branch
- upload-assets:
+ upload-asset:
# Branch name (default: 'assets/${{ github.workflow }}')
# (optional)
branch: "example-value"
@@ -2653,7 +2653,7 @@ safe-outputs:
github-token: "${{ secrets.GITHUB_TOKEN }}"
# Option 2: Enable asset publishing with default configuration
- upload-assets: null
+ upload-asset: null
# (optional)
# This field supports multiple formats (oneOf):
diff --git a/docs/src/content/docs/reference/glossary.md b/docs/src/content/docs/reference/glossary.md
index ae941ac5800..85842673cb4 100644
--- a/docs/src/content/docs/reference/glossary.md
+++ b/docs/src/content/docs/reference/glossary.md
@@ -106,7 +106,7 @@ A safe output capability that allows workflows to upload generated files (screen
```yaml
safe-outputs:
- upload-assets:
+ upload-asset:
branch: "assets/my-workflow" # branch name (default: "assets/${{ github.workflow }}")
max-size: 10240 # max file size in KB (default: 10MB)
allowed-exts: [.png, .jpg, .svg] # allowed extensions
diff --git a/docs/src/content/docs/reference/safe-outputs.md b/docs/src/content/docs/reference/safe-outputs.md
index c7780c28886..e153c9578c6 100644
--- a/docs/src/content/docs/reference/safe-outputs.md
+++ b/docs/src/content/docs/reference/safe-outputs.md
@@ -57,7 +57,7 @@ This declares that the workflow should create at most one new issue. The AI agen
| [**Assign to User**](#assign-to-user-assign-to-user) | `assign-to-user:` | Assign users to issues | 1 | ✅ |
| [**Push to PR Branch**](#push-to-pr-branch-push-to-pull-request-branch) | `push-to-pull-request-branch:` | Push changes to PR branch | 1 | ❌ |
| [**Update Release**](#release-updates-update-release) | `update-release:` | Update GitHub release descriptions | 1 | ✅ |
-| [**Upload Assets**](#asset-uploads-upload-assets) | `upload-assets:` | Upload files to orphaned git branch | 10 | ❌ |
+| [**Upload Assets**](#asset-uploads-upload-asset) | `upload-asset:` | Upload files to orphaned git branch | 10 | ❌ |
| [**Code Scanning Alerts**](#code-scanning-alerts-create-code-scanning-alert) | `create-code-scanning-alert:` | Generate SARIF security advisories | unlimited | ❌ |
| [**No-Op**](#no-op-logging-noop) | `noop:` | Log completion message for transparency (auto-enabled) | 1 | ❌ |
| [**Missing Tool**](#missing-tool-reporting-missing-tool) | `missing-tool:` | Report missing tools (auto-enabled) | unlimited | ❌ |
@@ -462,7 +462,7 @@ safe-outputs:
Agent output format: `{"type": "update_release", "tag": "v1.0.0", "operation": "replace", "body": "..."}`. The `tag` field is optional for release events (inferred from context). Workflow needs read access; only the generated job receives write permissions.
-### Asset Uploads (`upload-assets:`)
+### Asset Uploads (`upload-asset:`)
Uploads generated files (screenshots, charts, reports, diagrams) to an orphaned git branch for persistent, version-controlled storage. Assets are uploaded without requiring elevated permissions during agent execution—a separate job with `contents: write` handles the actual commit and push.
@@ -475,7 +475,7 @@ Uploads generated files (screenshots, charts, reports, diagrams) to an orphaned
```yaml wrap
safe-outputs:
- upload-assets:
+ upload-asset:
branch: "assets/my-workflow" # branch name (default: `"assets/${{ github.workflow }}"`)
max-size: 5120 # max file size in KB (default: 10240 = 10MB)
allowed-exts: [.png, .jpg, .svg] # allowed extensions (default: [.png, .jpg, .jpeg])
@@ -567,7 +567,7 @@ on: schedule
tools:
playwright:
safe-outputs:
- upload-assets:
+ upload-asset:
branch: "assets/screenshots"
allowed-exts: [.png]
max: 50
@@ -587,7 +587,7 @@ on: schedule
tools:
bash:
safe-outputs:
- upload-assets:
+ upload-asset:
branch: "assets/charts"
allowed-exts: [.png, .svg]
max-size: 2048
diff --git a/pkg/cli/workflows/test-claude-playwright-screenshots.md b/pkg/cli/workflows/test-claude-playwright-screenshots.md
index 21c354e062f..d6e5f4612d2 100644
--- a/pkg/cli/workflows/test-claude-playwright-screenshots.md
+++ b/pkg/cli/workflows/test-claude-playwright-screenshots.md
@@ -8,7 +8,7 @@ on:
- pelikhan/*
safe-outputs:
staged: true
- upload-assets:
+ upload-asset:
create-issue:
title-prefix: "[docs] "
engine:
diff --git a/pkg/cli/workflows/test-copilot-playwright-screenshots.md b/pkg/cli/workflows/test-copilot-playwright-screenshots.md
index 5ed7bcc476a..fe4be9d0294 100644
--- a/pkg/cli/workflows/test-copilot-playwright-screenshots.md
+++ b/pkg/cli/workflows/test-copilot-playwright-screenshots.md
@@ -8,7 +8,7 @@ on:
- pelikhan/*
safe-outputs:
staged: true
- upload-assets:
+ upload-asset:
create-issue:
title-prefix: "[docs] "
engine:
diff --git a/pkg/cli/workflows/test-playwright-args.md b/pkg/cli/workflows/test-playwright-args.md
index 746421cd86a..cade0161fd6 100644
--- a/pkg/cli/workflows/test-playwright-args.md
+++ b/pkg/cli/workflows/test-playwright-args.md
@@ -5,7 +5,7 @@ permissions: read-all
engine: claude
safe-outputs:
staged: true
- upload-assets:
+ upload-asset:
create-issue:
title-prefix: "[test] "
tools:
diff --git a/pkg/workflow/js/upload_assets.test.cjs b/pkg/workflow/js/upload_assets.test.cjs
index c6210d5294a..7399f2a1ff1 100644
--- a/pkg/workflow/js/upload_assets.test.cjs
+++ b/pkg/workflow/js/upload_assets.test.cjs
@@ -64,7 +64,7 @@ const mockCore = { debug: vi.fn(), info: vi.fn(), notice: vi.fn(), warning: vi.f
}),
describe("legacy upload-assets type handling", () => {
it("should warn about legacy upload-assets type (plural hyphenated)", async () => {
- (process.env.GH_AW_ASSETS_BRANCH = "assets/test-workflow"), (process.env.GH_AW_SAFE_OUTPUTS_STAGED = "false");
+ ((process.env.GH_AW_ASSETS_BRANCH = "assets/test-workflow"), (process.env.GH_AW_SAFE_OUTPUTS_STAGED = "false"));
const assetDir = "/tmp/gh-aw/safeoutputs/assets";
fs.existsSync(assetDir) || fs.mkdirSync(assetDir, { recursive: !0 });
const assetPath = path.join(assetDir, "test.png");
diff --git a/specs/safe-output-environment-variables.md b/specs/safe-output-environment-variables.md
index ebe6c69d8ef..6e7b585246a 100644
--- a/specs/safe-output-environment-variables.md
+++ b/specs/safe-output-environment-variables.md
@@ -140,7 +140,7 @@ Each safe output type has additional environment variables specific to its confi
| `GH_AW_COMMENT_ID` | Comment ID from activation job | Command-triggered workflow | From activation job output |
| `GH_AW_COMMENT_REPO` | Repository containing comment | Command-triggered workflow | From activation job output |
-### Upload Assets (`upload-assets:`)
+### Upload Assets (`upload-asset:`)
| Variable | Description | Set When | Example |
|----------|-------------|----------|---------|
From c773365220f52eeec841565550b53dcbd8f8760f Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Mon, 22 Dec 2025 17:28:04 +0000
Subject: [PATCH 05/11] Remove legacy upload-assets handling from JavaScript
Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com>
---
pkg/workflow/js/upload_assets.cjs | 18 +++++-------------
pkg/workflow/js/upload_assets.test.cjs | 19 -------------------
2 files changed, 5 insertions(+), 32 deletions(-)
diff --git a/pkg/workflow/js/upload_assets.cjs b/pkg/workflow/js/upload_assets.cjs
index 7c37a420582..325b12164c5 100644
--- a/pkg/workflow/js/upload_assets.cjs
+++ b/pkg/workflow/js/upload_assets.cjs
@@ -76,25 +76,17 @@ async function main() {
return;
}
- // Find all upload-asset items (singular is the standard)
+ // Find all upload-asset items
const uploadItems = result.items.filter(/** @param {any} item */ item => item.type === "upload_asset");
- // Handle legacy upload-assets (plural, hyphenated) with warning
- const legacyUploadAssetsItems = result.items.filter(/** @param {any} item */ item => item.type === "upload-assets");
- if (legacyUploadAssetsItems.length > 0) {
- core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
- }
-
- const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
-
- if (allUploadItems.length === 0) {
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
@@ -124,7 +116,7 @@ async function main() {
}
// Process each asset
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
@@ -182,7 +174,7 @@ async function main() {
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/pkg/workflow/js/upload_assets.test.cjs b/pkg/workflow/js/upload_assets.test.cjs
index 7399f2a1ff1..e87087c5357 100644
--- a/pkg/workflow/js/upload_assets.test.cjs
+++ b/pkg/workflow/js/upload_assets.test.cjs
@@ -62,25 +62,6 @@ const mockCore = { debug: vi.fn(), info: vi.fn(), notice: vi.fn(), warning: vi.f
(expect(branchNameCall).toBeDefined(), expect(branchNameCall[1]).toBe("assets/my-branch"));
});
}),
- describe("legacy upload-assets type handling", () => {
- it("should warn about legacy upload-assets type (plural hyphenated)", async () => {
- ((process.env.GH_AW_ASSETS_BRANCH = "assets/test-workflow"), (process.env.GH_AW_SAFE_OUTPUTS_STAGED = "false"));
- const assetDir = "/tmp/gh-aw/safeoutputs/assets";
- fs.existsSync(assetDir) || fs.mkdirSync(assetDir, { recursive: !0 });
- const assetPath = path.join(assetDir, "test.png");
- fs.writeFileSync(assetPath, "fake png data");
- const crypto = require("crypto"),
- fileContent = fs.readFileSync(assetPath),
- agentOutput = {
- items: [{ type: "upload-assets", fileName: "test.png", sha: crypto.createHash("sha256").update(fileContent).digest("hex"), size: fileContent.length, targetFileName: "test.png", url: "https://example.com/test.png" }],
- };
- setAgentOutput(agentOutput);
- (mockExec.exec.mockImplementation(async () => 0), await executeScript());
- const warningCalls = mockCore.warning.mock.calls.filter(call => call[0].includes("legacy type"));
- (expect(warningCalls.length).toBeGreaterThan(0), expect(warningCalls[0][0]).toContain("upload-assets"), expect(warningCalls[0][0]).toContain("deprecated"));
- fs.existsSync(assetPath) && fs.unlinkSync(assetPath);
- });
- }),
describe("branch prefix validation", () => {
(it("should allow creating orphaned branch with 'assets/' prefix when branch doesn't exist", async () => {
(fs.existsSync("test.png") && fs.unlinkSync("test.png"), (process.env.GH_AW_ASSETS_BRANCH = "assets/test-workflow"), (process.env.GH_AW_SAFE_OUTPUTS_STAGED = "false"));
From 889b421e302fcd7ab2cf2d9a13b241786d6ccd12 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]"
Date: Mon, 22 Dec 2025 17:37:43 +0000
Subject: [PATCH 06/11] Add changeset [skip-ci]
---
.changeset/patch-standardize-upload-asset.md | 9 +++++++++
1 file changed, 9 insertions(+)
create mode 100644 .changeset/patch-standardize-upload-asset.md
diff --git a/.changeset/patch-standardize-upload-asset.md b/.changeset/patch-standardize-upload-asset.md
new file mode 100644
index 00000000000..a051568cdf4
--- /dev/null
+++ b/.changeset/patch-standardize-upload-asset.md
@@ -0,0 +1,9 @@
+---
+"gh-aw": patch
+---
+
+Standardize safe output references to singular "upload-asset" across schemas,
+parsing, and processing logic. Includes a codemod to migrate existing workflows
+and updates to tests and documentation. Note: legacy "upload-assets" (plural)
+handling is removed; workflows emitting the deprecated type must be migrated.
+
From e937816c52c96fdfcafdf4f4206f19b20504f627 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Mon, 22 Dec 2025 17:49:20 +0000
Subject: [PATCH 07/11] Move upload_assets to separate job with git
configuration
Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com>
---
.github/workflows/audit-workflows.lock.yml | 13 +-
.../copilot-pr-nlp-analysis.lock.yml | 13 +-
.../copilot-session-insights.lock.yml | 13 +-
.../daily-copilot-token-report.lock.yml | 13 +-
.github/workflows/daily-file-diet.lock.yml | 13 +-
.../workflows/daily-firewall-report.lock.yml | 13 +-
.../workflows/daily-issues-report.lock.yml | 13 +-
.../daily-multi-device-docs-tester.lock.yml | 13 +-
.github/workflows/daily-news.lock.yml | 13 +-
.../daily-performance-summary.lock.yml | 13 +-
.../workflows/daily-repo-chronicle.lock.yml | 13 +-
.github/workflows/deep-report.lock.yml | 13 +-
.github/workflows/docs-noob-tester.lock.yml | 13 +-
.../github-mcp-structural-analysis.lock.yml | 13 +-
.github/workflows/intelligence.lock.yml | 13 +-
.github/workflows/org-health-report.lock.yml | 13 +-
.github/workflows/poem-bot.lock.yml | 371 +++++++++++-------
.github/workflows/portfolio-analyst.lock.yml | 13 +-
.github/workflows/python-data-charts.lock.yml | 13 +-
.../workflows/stale-repo-identifier.lock.yml | 13 +-
.../workflows/technical-doc-writer.lock.yml | 13 +-
.github/workflows/unbloat-docs.lock.yml | 13 +-
.../workflows/weekly-issue-summary.lock.yml | 13 +-
pkg/workflow/compiler_safe_output_jobs.go | 18 +
pkg/workflow/compiler_safe_outputs_core.go | 26 +-
25 files changed, 341 insertions(+), 360 deletions(-)
diff --git a/.github/workflows/audit-workflows.lock.yml b/.github/workflows/audit-workflows.lock.yml
index 2abf5ef322b..32ca27b985c 100644
--- a/.github/workflows/audit-workflows.lock.yml
+++ b/.github/workflows/audit-workflows.lock.yml
@@ -7632,18 +7632,13 @@ jobs:
return;
}
const uploadItems = result.items.filter( item => item.type === "upload_asset");
- const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
- if (legacyUploadAssetsItems.length > 0) {
- core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
- }
- const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
- if (allUploadItems.length === 0) {
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -7665,7 +7660,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -7706,7 +7701,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/copilot-pr-nlp-analysis.lock.yml b/.github/workflows/copilot-pr-nlp-analysis.lock.yml
index 1ed8416218f..8a4f69736f4 100644
--- a/.github/workflows/copilot-pr-nlp-analysis.lock.yml
+++ b/.github/workflows/copilot-pr-nlp-analysis.lock.yml
@@ -8525,18 +8525,13 @@ jobs:
return;
}
const uploadItems = result.items.filter( item => item.type === "upload_asset");
- const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
- if (legacyUploadAssetsItems.length > 0) {
- core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
- }
- const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
- if (allUploadItems.length === 0) {
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -8558,7 +8553,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8599,7 +8594,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/copilot-session-insights.lock.yml b/.github/workflows/copilot-session-insights.lock.yml
index bf89f941069..f5e50df94ed 100644
--- a/.github/workflows/copilot-session-insights.lock.yml
+++ b/.github/workflows/copilot-session-insights.lock.yml
@@ -8617,18 +8617,13 @@ jobs:
return;
}
const uploadItems = result.items.filter( item => item.type === "upload_asset");
- const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
- if (legacyUploadAssetsItems.length > 0) {
- core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
- }
- const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
- if (allUploadItems.length === 0) {
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -8650,7 +8645,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8691,7 +8686,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/daily-copilot-token-report.lock.yml b/.github/workflows/daily-copilot-token-report.lock.yml
index 883ef912c39..a26eef0647b 100644
--- a/.github/workflows/daily-copilot-token-report.lock.yml
+++ b/.github/workflows/daily-copilot-token-report.lock.yml
@@ -8615,18 +8615,13 @@ jobs:
return;
}
const uploadItems = result.items.filter( item => item.type === "upload_asset");
- const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
- if (legacyUploadAssetsItems.length > 0) {
- core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
- }
- const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
- if (allUploadItems.length === 0) {
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -8648,7 +8643,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8689,7 +8684,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/daily-file-diet.lock.yml b/.github/workflows/daily-file-diet.lock.yml
index 2fdc4fc5dd5..76f1370ca6d 100644
--- a/.github/workflows/daily-file-diet.lock.yml
+++ b/.github/workflows/daily-file-diet.lock.yml
@@ -8781,18 +8781,13 @@ jobs:
return;
}
const uploadItems = result.items.filter( item => item.type === "upload_asset");
- const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
- if (legacyUploadAssetsItems.length > 0) {
- core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
- }
- const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
- if (allUploadItems.length === 0) {
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -8814,7 +8809,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8855,7 +8850,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/daily-firewall-report.lock.yml b/.github/workflows/daily-firewall-report.lock.yml
index 71e54979989..53371842223 100644
--- a/.github/workflows/daily-firewall-report.lock.yml
+++ b/.github/workflows/daily-firewall-report.lock.yml
@@ -8298,18 +8298,13 @@ jobs:
return;
}
const uploadItems = result.items.filter( item => item.type === "upload_asset");
- const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
- if (legacyUploadAssetsItems.length > 0) {
- core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
- }
- const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
- if (allUploadItems.length === 0) {
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -8331,7 +8326,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8372,7 +8367,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/daily-issues-report.lock.yml b/.github/workflows/daily-issues-report.lock.yml
index 2241c5af6a9..dc5eb908f8e 100644
--- a/.github/workflows/daily-issues-report.lock.yml
+++ b/.github/workflows/daily-issues-report.lock.yml
@@ -9172,18 +9172,13 @@ jobs:
return;
}
const uploadItems = result.items.filter( item => item.type === "upload_asset");
- const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
- if (legacyUploadAssetsItems.length > 0) {
- core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
- }
- const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
- if (allUploadItems.length === 0) {
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -9205,7 +9200,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -9246,7 +9241,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/daily-multi-device-docs-tester.lock.yml b/.github/workflows/daily-multi-device-docs-tester.lock.yml
index a40f2665ece..fef0723a2e5 100644
--- a/.github/workflows/daily-multi-device-docs-tester.lock.yml
+++ b/.github/workflows/daily-multi-device-docs-tester.lock.yml
@@ -7232,18 +7232,13 @@ jobs:
return;
}
const uploadItems = result.items.filter( item => item.type === "upload_asset");
- const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
- if (legacyUploadAssetsItems.length > 0) {
- core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
- }
- const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
- if (allUploadItems.length === 0) {
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -7265,7 +7260,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -7306,7 +7301,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/daily-news.lock.yml b/.github/workflows/daily-news.lock.yml
index 8621af76077..0a06fc6b13f 100644
--- a/.github/workflows/daily-news.lock.yml
+++ b/.github/workflows/daily-news.lock.yml
@@ -8420,18 +8420,13 @@ jobs:
return;
}
const uploadItems = result.items.filter( item => item.type === "upload_asset");
- const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
- if (legacyUploadAssetsItems.length > 0) {
- core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
- }
- const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
- if (allUploadItems.length === 0) {
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -8453,7 +8448,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8494,7 +8489,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/daily-performance-summary.lock.yml b/.github/workflows/daily-performance-summary.lock.yml
index 02376d1ded0..b82cd4a363d 100644
--- a/.github/workflows/daily-performance-summary.lock.yml
+++ b/.github/workflows/daily-performance-summary.lock.yml
@@ -10243,18 +10243,13 @@ jobs:
return;
}
const uploadItems = result.items.filter( item => item.type === "upload_asset");
- const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
- if (legacyUploadAssetsItems.length > 0) {
- core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
- }
- const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
- if (allUploadItems.length === 0) {
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -10276,7 +10271,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -10317,7 +10312,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/daily-repo-chronicle.lock.yml b/.github/workflows/daily-repo-chronicle.lock.yml
index cc32a5433d7..93745a445ff 100644
--- a/.github/workflows/daily-repo-chronicle.lock.yml
+++ b/.github/workflows/daily-repo-chronicle.lock.yml
@@ -8341,18 +8341,13 @@ jobs:
return;
}
const uploadItems = result.items.filter( item => item.type === "upload_asset");
- const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
- if (legacyUploadAssetsItems.length > 0) {
- core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
- }
- const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
- if (allUploadItems.length === 0) {
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -8374,7 +8369,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8415,7 +8410,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/deep-report.lock.yml b/.github/workflows/deep-report.lock.yml
index 75a3cc18e08..80c142f9e1e 100644
--- a/.github/workflows/deep-report.lock.yml
+++ b/.github/workflows/deep-report.lock.yml
@@ -8340,18 +8340,13 @@ jobs:
return;
}
const uploadItems = result.items.filter( item => item.type === "upload_asset");
- const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
- if (legacyUploadAssetsItems.length > 0) {
- core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
- }
- const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
- if (allUploadItems.length === 0) {
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -8373,7 +8368,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8414,7 +8409,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/docs-noob-tester.lock.yml b/.github/workflows/docs-noob-tester.lock.yml
index e586c872738..6ca60ee894b 100644
--- a/.github/workflows/docs-noob-tester.lock.yml
+++ b/.github/workflows/docs-noob-tester.lock.yml
@@ -7808,18 +7808,13 @@ jobs:
return;
}
const uploadItems = result.items.filter( item => item.type === "upload_asset");
- const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
- if (legacyUploadAssetsItems.length > 0) {
- core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
- }
- const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
- if (allUploadItems.length === 0) {
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -7841,7 +7836,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -7882,7 +7877,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/github-mcp-structural-analysis.lock.yml b/.github/workflows/github-mcp-structural-analysis.lock.yml
index 7ae97f350f3..5220e97ed13 100644
--- a/.github/workflows/github-mcp-structural-analysis.lock.yml
+++ b/.github/workflows/github-mcp-structural-analysis.lock.yml
@@ -7970,18 +7970,13 @@ jobs:
return;
}
const uploadItems = result.items.filter( item => item.type === "upload_asset");
- const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
- if (legacyUploadAssetsItems.length > 0) {
- core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
- }
- const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
- if (allUploadItems.length === 0) {
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -8003,7 +7998,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8044,7 +8039,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/intelligence.lock.yml b/.github/workflows/intelligence.lock.yml
index 74ee92ba9cd..c7ba61c5764 100644
--- a/.github/workflows/intelligence.lock.yml
+++ b/.github/workflows/intelligence.lock.yml
@@ -8771,18 +8771,13 @@ jobs:
return;
}
const uploadItems = result.items.filter( item => item.type === "upload_asset");
- const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
- if (legacyUploadAssetsItems.length > 0) {
- core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
- }
- const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
- if (allUploadItems.length === 0) {
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -8804,7 +8799,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8845,7 +8840,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/org-health-report.lock.yml b/.github/workflows/org-health-report.lock.yml
index c9068584815..3da9b0eb741 100644
--- a/.github/workflows/org-health-report.lock.yml
+++ b/.github/workflows/org-health-report.lock.yml
@@ -8451,18 +8451,13 @@ jobs:
return;
}
const uploadItems = result.items.filter( item => item.type === "upload_asset");
- const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
- if (legacyUploadAssetsItems.length > 0) {
- core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
- }
- const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
- if (allUploadItems.length === 0) {
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -8484,7 +8479,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8525,7 +8520,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/poem-bot.lock.yml b/.github/workflows/poem-bot.lock.yml
index 2c91b7c7669..47ce62eabe2 100644
--- a/.github/workflows/poem-bot.lock.yml
+++ b/.github/workflows/poem-bot.lock.yml
@@ -7007,6 +7007,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7984,9 +7985,6 @@ jobs:
pull-requests: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_ENGINE_MODEL: "gpt-5"
GH_AW_SAFE_OUTPUTS_STAGED: "true"
@@ -13258,146 +13256,6 @@ jobs:
await core.summary.addRaw(summaryContent).write();
}
(async () => { await main(); })();
- - name: Upload Assets
- id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
- env:
- GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
- GH_AW_SAFE_OUTPUTS_STAGED: "true"
- with:
- github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
- script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
- const fs = require("fs");
- const path = require("path");
- const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
- function normalizeBranchName(branchName) {
- if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
- return branchName;
- }
- let normalized = branchName.replace(/[^a-zA-Z0-9\-_/.]+/g, "-");
- normalized = normalized.replace(/-+/g, "-");
- normalized = normalized.replace(/^-+|-+$/g, "");
- if (normalized.length > 128) {
- normalized = normalized.substring(0, 128);
- }
- normalized = normalized.replace(/-+$/, "");
- normalized = normalized.toLowerCase();
- return normalized;
- }
- async function main() {
- const isStaged = process.env.GH_AW_SAFE_OUTPUTS_STAGED === "true";
- const branchName = process.env.GH_AW_ASSETS_BRANCH;
- if (!branchName || typeof branchName !== "string") {
- core.setFailed("GH_AW_ASSETS_BRANCH environment variable is required but not set");
- return;
- }
- const normalizedBranchName = normalizeBranchName(branchName);
- core.info(`Using assets branch: ${normalizedBranchName}`);
- const result = loadAgentOutput();
- if (!result.success) {
- core.setOutput("upload_count", "0");
- core.setOutput("branch_name", normalizedBranchName);
- return;
- }
- const uploadItems = result.items.filter( item => item.type === "upload_asset");
- const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
- if (legacyUploadAssetsItems.length > 0) {
- core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
- }
- const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
- if (allUploadItems.length === 0) {
- core.info("No upload-asset items found in agent output");
- core.setOutput("upload_count", "0");
- core.setOutput("branch_name", normalizedBranchName);
- return;
- }
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
- let uploadCount = 0;
- let hasChanges = false;
- try {
- try {
- await exec.exec(`git rev-parse --verify origin/${normalizedBranchName}`);
- await exec.exec(`git checkout -B ${normalizedBranchName} origin/${normalizedBranchName}`);
- core.info(`Checked out existing branch from origin: ${normalizedBranchName}`);
- } catch (originError) {
- if (!normalizedBranchName.startsWith("assets/")) {
- core.setFailed(
- `Branch '${normalizedBranchName}' does not start with the required 'assets/' prefix. ` +
- `Orphaned branches can only be automatically created under the 'assets/' prefix. ` +
- `Please create the branch manually first, or use a branch name starting with 'assets/'.`
- );
- return;
- }
- core.info(`Creating new orphaned branch: ${normalizedBranchName}`);
- await exec.exec(`git checkout --orphan ${normalizedBranchName}`);
- await exec.exec(`git rm -rf .`);
- await exec.exec(`git clean -fdx`);
- }
- for (const asset of allUploadItems) {
- try {
- const { fileName, sha, size, targetFileName } = asset;
- if (!fileName || !sha || !targetFileName) {
- core.error(`Invalid asset entry missing required fields: ${JSON.stringify(asset)}`);
- continue;
- }
- const assetSourcePath = path.join("/tmp/gh-aw/safeoutputs/assets", fileName);
- if (!fs.existsSync(assetSourcePath)) {
- core.warning(`Asset file not found: ${assetSourcePath}`);
- continue;
- }
- const fileContent = fs.readFileSync(assetSourcePath);
- const computedSha = crypto.createHash("sha256").update(fileContent).digest("hex");
- if (computedSha !== sha) {
- core.warning(`SHA mismatch for ${fileName}: expected ${sha}, got ${computedSha}`);
- continue;
- }
- if (fs.existsSync(targetFileName)) {
- core.info(`Asset ${targetFileName} already exists, skipping`);
- continue;
- }
- fs.copyFileSync(assetSourcePath, targetFileName);
- await exec.exec(`git add "${targetFileName}"`);
- uploadCount++;
- hasChanges = true;
- core.info(`Added asset: ${targetFileName} (${size} bytes)`);
- } catch (error) {
- core.warning(`Failed to process asset ${asset.fileName}: ${error instanceof Error ? error.message : String(error)}`);
- }
- }
- if (hasChanges) {
- const commitMessage = `[skip-ci] Add ${uploadCount} asset(s)`;
- await exec.exec(`git`, [`commit`, `-m`, commitMessage]);
- if (isStaged) {
- core.summary.addRaw("## Staged Asset Publication");
- } else {
- await exec.exec(`git push origin ${normalizedBranchName}`);
- core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
- core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
- }
- for (const asset of allUploadItems) {
- if (asset.fileName && asset.sha && asset.size && asset.url) {
- core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
- }
- }
- core.summary.write();
- } else {
- core.info("No new assets to upload");
- }
- } catch (error) {
- core.setFailed(`Failed to upload assets: ${error instanceof Error ? error.message : String(error)}`);
- return;
- }
- core.setOutput("upload_count", uploadCount.toString());
- core.setOutput("branch_name", normalizedBranchName);
- }
- (async () => { await main(); })();
- name: Link Sub Issue
id: link_sub_issue
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'link_sub_issue'))
@@ -13874,3 +13732,230 @@ jobs:
key: poem-memory-${{ github.workflow }}-${{ github.run_id }}
path: /tmp/gh-aw/cache-memory
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+      outputs:
+        branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+        # The script sets `upload_count` (core.setOutput("upload_count", ...)); it never
+        # sets `published_count`, so map the job-level output to the real step output.
+        published_count: ${{ steps.upload_assets.outputs.upload_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
+ id: upload_assets
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ env:
+ GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Poem Bot - A Creative Agentic Workflow"
+ GH_AW_ENGINE_ID: "copilot"
+ GH_AW_ENGINE_MODEL: "gpt-5"
+ GH_AW_SAFE_OUTPUTS_STAGED: "true"
+ GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 🪶 *Verses penned by [{workflow_name}]({run_url})*\",\"runStarted\":\"🎭 Hear ye! The muse stirs! [{workflow_name}]({run_url}) takes quill in hand for this {event_type}...\",\"runSuccess\":\"🪶 The poem is writ! [{workflow_name}]({run_url}) has composed verses most fair. Applause! 👏\",\"runFailure\":\"🎭 Alas! [{workflow_name}]({run_url}) {status}. The muse has fled, leaving verses unsung...\"}"
+ with:
+ github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
+ script: |
+ const fs = require("fs");
+ const path = require("path");
+ const crypto = require("crypto");
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
+ function normalizeBranchName(branchName) {
+ if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
+ return branchName;
+ }
+ let normalized = branchName.replace(/[^a-zA-Z0-9\-_/.]+/g, "-");
+ normalized = normalized.replace(/-+/g, "-");
+ normalized = normalized.replace(/^-+|-+$/g, "");
+ if (normalized.length > 128) {
+ normalized = normalized.substring(0, 128);
+ }
+ normalized = normalized.replace(/-+$/, "");
+ normalized = normalized.toLowerCase();
+ return normalized;
+ }
+ async function main() {
+ const isStaged = process.env.GH_AW_SAFE_OUTPUTS_STAGED === "true";
+ const branchName = process.env.GH_AW_ASSETS_BRANCH;
+ if (!branchName || typeof branchName !== "string") {
+ core.setFailed("GH_AW_ASSETS_BRANCH environment variable is required but not set");
+ return;
+ }
+ const normalizedBranchName = normalizeBranchName(branchName);
+ core.info(`Using assets branch: ${normalizedBranchName}`);
+ const result = loadAgentOutput();
+ if (!result.success) {
+ core.setOutput("upload_count", "0");
+ core.setOutput("branch_name", normalizedBranchName);
+ return;
+ }
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ if (uploadItems.length === 0) {
+ core.info("No upload-asset items found in agent output");
+ core.setOutput("upload_count", "0");
+ core.setOutput("branch_name", normalizedBranchName);
+ return;
+ }
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
+ let uploadCount = 0;
+ let hasChanges = false;
+ try {
+ try {
+ await exec.exec(`git rev-parse --verify origin/${normalizedBranchName}`);
+ await exec.exec(`git checkout -B ${normalizedBranchName} origin/${normalizedBranchName}`);
+ core.info(`Checked out existing branch from origin: ${normalizedBranchName}`);
+ } catch (originError) {
+ if (!normalizedBranchName.startsWith("assets/")) {
+ core.setFailed(
+ `Branch '${normalizedBranchName}' does not start with the required 'assets/' prefix. ` +
+ `Orphaned branches can only be automatically created under the 'assets/' prefix. ` +
+ `Please create the branch manually first, or use a branch name starting with 'assets/'.`
+ );
+ return;
+ }
+ core.info(`Creating new orphaned branch: ${normalizedBranchName}`);
+ await exec.exec(`git checkout --orphan ${normalizedBranchName}`);
+ await exec.exec(`git rm -rf .`);
+ await exec.exec(`git clean -fdx`);
+ }
+ for (const asset of uploadItems) {
+ try {
+ const { fileName, sha, size, targetFileName } = asset;
+ if (!fileName || !sha || !targetFileName) {
+ core.error(`Invalid asset entry missing required fields: ${JSON.stringify(asset)}`);
+ continue;
+ }
+ const assetSourcePath = path.join("/tmp/gh-aw/safeoutputs/assets", fileName);
+ if (!fs.existsSync(assetSourcePath)) {
+ core.warning(`Asset file not found: ${assetSourcePath}`);
+ continue;
+ }
+ const fileContent = fs.readFileSync(assetSourcePath);
+ const computedSha = crypto.createHash("sha256").update(fileContent).digest("hex");
+ if (computedSha !== sha) {
+ core.warning(`SHA mismatch for ${fileName}: expected ${sha}, got ${computedSha}`);
+ continue;
+ }
+ if (fs.existsSync(targetFileName)) {
+ core.info(`Asset ${targetFileName} already exists, skipping`);
+ continue;
+ }
+ fs.copyFileSync(assetSourcePath, targetFileName);
+ await exec.exec(`git add "${targetFileName}"`);
+ uploadCount++;
+ hasChanges = true;
+ core.info(`Added asset: ${targetFileName} (${size} bytes)`);
+ } catch (error) {
+ core.warning(`Failed to process asset ${asset.fileName}: ${error instanceof Error ? error.message : String(error)}`);
+ }
+ }
+ if (hasChanges) {
+ const commitMessage = `[skip-ci] Add ${uploadCount} asset(s)`;
+ await exec.exec(`git`, [`commit`, `-m`, commitMessage]);
+ if (isStaged) {
+ core.summary.addRaw("## Staged Asset Publication");
+ } else {
+ await exec.exec(`git push origin ${normalizedBranchName}`);
+ core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
+ core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
+ }
+ for (const asset of uploadItems) {
+ if (asset.fileName && asset.sha && asset.size && asset.url) {
+ core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
+ }
+ }
+ core.summary.write();
+ } else {
+ core.info("No new assets to upload");
+ }
+ } catch (error) {
+ core.setFailed(`Failed to upload assets: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+ core.setOutput("upload_count", uploadCount.toString());
+ core.setOutput("branch_name", normalizedBranchName);
+ }
+ await main();
+
diff --git a/.github/workflows/portfolio-analyst.lock.yml b/.github/workflows/portfolio-analyst.lock.yml
index 06d3121a48d..d5fe0f145ef 100644
--- a/.github/workflows/portfolio-analyst.lock.yml
+++ b/.github/workflows/portfolio-analyst.lock.yml
@@ -8446,18 +8446,13 @@ jobs:
return;
}
const uploadItems = result.items.filter( item => item.type === "upload_asset");
- const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
- if (legacyUploadAssetsItems.length > 0) {
- core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
- }
- const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
- if (allUploadItems.length === 0) {
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -8479,7 +8474,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8520,7 +8515,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/python-data-charts.lock.yml b/.github/workflows/python-data-charts.lock.yml
index 9db54f824a3..9fa59df5dfd 100644
--- a/.github/workflows/python-data-charts.lock.yml
+++ b/.github/workflows/python-data-charts.lock.yml
@@ -8743,18 +8743,13 @@ jobs:
return;
}
const uploadItems = result.items.filter( item => item.type === "upload_asset");
- const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
- if (legacyUploadAssetsItems.length > 0) {
- core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
- }
- const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
- if (allUploadItems.length === 0) {
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -8776,7 +8771,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8817,7 +8812,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/stale-repo-identifier.lock.yml b/.github/workflows/stale-repo-identifier.lock.yml
index df5b1edcc05..fbbdc56a2c9 100644
--- a/.github/workflows/stale-repo-identifier.lock.yml
+++ b/.github/workflows/stale-repo-identifier.lock.yml
@@ -8253,18 +8253,13 @@ jobs:
return;
}
const uploadItems = result.items.filter( item => item.type === "upload_asset");
- const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
- if (legacyUploadAssetsItems.length > 0) {
- core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
- }
- const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
- if (allUploadItems.length === 0) {
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -8286,7 +8281,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8327,7 +8322,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/technical-doc-writer.lock.yml b/.github/workflows/technical-doc-writer.lock.yml
index 197e950bad3..6498a0cd600 100644
--- a/.github/workflows/technical-doc-writer.lock.yml
+++ b/.github/workflows/technical-doc-writer.lock.yml
@@ -8775,18 +8775,13 @@ jobs:
return;
}
const uploadItems = result.items.filter( item => item.type === "upload_asset");
- const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
- if (legacyUploadAssetsItems.length > 0) {
- core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
- }
- const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
- if (allUploadItems.length === 0) {
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -8808,7 +8803,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8849,7 +8844,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/unbloat-docs.lock.yml b/.github/workflows/unbloat-docs.lock.yml
index b2e44ceb8d5..616534bf089 100644
--- a/.github/workflows/unbloat-docs.lock.yml
+++ b/.github/workflows/unbloat-docs.lock.yml
@@ -8982,18 +8982,13 @@ jobs:
return;
}
const uploadItems = result.items.filter( item => item.type === "upload_asset");
- const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
- if (legacyUploadAssetsItems.length > 0) {
- core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
- }
- const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
- if (allUploadItems.length === 0) {
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -9015,7 +9010,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -9056,7 +9051,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/.github/workflows/weekly-issue-summary.lock.yml b/.github/workflows/weekly-issue-summary.lock.yml
index 0f2072e27ae..01766b3bc63 100644
--- a/.github/workflows/weekly-issue-summary.lock.yml
+++ b/.github/workflows/weekly-issue-summary.lock.yml
@@ -8249,18 +8249,13 @@ jobs:
return;
}
const uploadItems = result.items.filter( item => item.type === "upload_asset");
- const legacyUploadAssetsItems = result.items.filter( item => item.type === "upload-assets");
- if (legacyUploadAssetsItems.length > 0) {
- core.warning(`Found ${legacyUploadAssetsItems.length} item(s) with legacy type "upload-assets" (plural). This type is deprecated. Use "upload_asset" (singular) instead.`);
- }
- const allUploadItems = [...uploadItems, ...legacyUploadAssetsItems];
- if (allUploadItems.length === 0) {
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -8282,7 +8277,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8323,7 +8318,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/pkg/workflow/compiler_safe_output_jobs.go b/pkg/workflow/compiler_safe_output_jobs.go
index 832681fc03a..511ea50f0ea 100644
--- a/pkg/workflow/compiler_safe_output_jobs.go
+++ b/pkg/workflow/compiler_safe_output_jobs.go
@@ -63,6 +63,24 @@ func (c *Compiler) buildSafeOutputsJobs(data *WorkflowData, jobName, markdownPat
safeOutputJobNames = append(safeOutputJobNames, safeJobNames...)
compilerSafeOutputJobsLog.Printf("Added %d custom safe-job names to conclusion dependencies", len(safeJobNames))
+ // Build upload_assets job as a separate job if configured
+ // This needs to be separate from the consolidated safe_outputs job because it requires:
+ // 1. Git configuration for pushing to orphaned branches
+ // 2. Checkout with proper credentials
+ // 3. Different permissions (contents: write)
+ if data.SafeOutputs != nil && data.SafeOutputs.UploadAssets != nil {
+ compilerSafeOutputJobsLog.Print("Building separate upload_assets job")
+ uploadAssetsJob, err := c.buildUploadAssetsJob(data, jobName)
+ if err != nil {
+ return fmt.Errorf("failed to build upload_assets job: %w", err)
+ }
+ if err := c.jobManager.AddJob(uploadAssetsJob); err != nil {
+ return fmt.Errorf("failed to add upload_assets job: %w", err)
+ }
+ safeOutputJobNames = append(safeOutputJobNames, uploadAssetsJob.Name)
+ compilerSafeOutputJobsLog.Printf("Added separate upload_assets job")
+ }
+
// Build conclusion job if add-comment is configured OR if command trigger is configured with reactions
// This job runs last, after all safe output jobs (and push_repo_memory if configured), to update the activation comment on failure
// The buildConclusionJob function itself will decide whether to create the job based on the configuration
diff --git a/pkg/workflow/compiler_safe_outputs_core.go b/pkg/workflow/compiler_safe_outputs_core.go
index c351100952b..377612e8e20 100644
--- a/pkg/workflow/compiler_safe_outputs_core.go
+++ b/pkg/workflow/compiler_safe_outputs_core.go
@@ -2,7 +2,6 @@ package workflow
import (
"fmt"
- "strings"
"github.com/githubnext/gh-aw/pkg/constants"
"github.com/githubnext/gh-aw/pkg/logger"
@@ -112,9 +111,8 @@ func (c *Compiler) buildConsolidatedSafeOutputsJob(data *WorkflowData, mainJobNa
if data.SafeOutputs.PushToPullRequestBranch != nil {
scriptNames = append(scriptNames, "push_to_pull_request_branch")
}
- if data.SafeOutputs.UploadAssets != nil {
- scriptNames = append(scriptNames, "upload_assets")
- }
+ // Upload Assets is handled as a separate job (not in consolidated job)
+ // See buildUploadAssetsJob for the separate job implementation
if data.SafeOutputs.UpdateRelease != nil {
scriptNames = append(scriptNames, "update_release")
}
@@ -397,15 +395,9 @@ func (c *Compiler) buildConsolidatedSafeOutputsJob(data *WorkflowData, mainJobNa
permissions.Merge(NewPermissionsContentsWriteIssuesWritePRWrite())
}
- // 18. Upload Assets step
- if data.SafeOutputs.UploadAssets != nil {
- stepConfig := c.buildUploadAssetsStepConfig(data, mainJobName, threatDetectionEnabled)
- stepYAML := c.buildConsolidatedSafeOutputStep(data, stepConfig)
- steps = append(steps, stepYAML...)
- safeOutputStepNames = append(safeOutputStepNames, stepConfig.StepID)
-
- permissions.Merge(NewPermissionsContentsWrite())
- }
+ // 18. Upload Assets - now handled as a separate job (see buildSafeOutputsJobs)
+ // This was moved out of the consolidated job to allow proper git configuration
+ // for pushing to orphaned branches
// 19. Update Release step
if data.SafeOutputs.UpdateRelease != nil {
@@ -655,12 +647,8 @@ func (c *Compiler) buildJobLevelSafeOutputEnvVars(data *WorkflowData, workflowID
}
}
- // Add asset upload configuration if present (applies to all steps)
- if data.SafeOutputs.UploadAssets != nil {
- envVars["GH_AW_ASSETS_BRANCH"] = fmt.Sprintf("%q", data.SafeOutputs.UploadAssets.BranchName)
- envVars["GH_AW_ASSETS_MAX_SIZE_KB"] = fmt.Sprintf("%d", data.SafeOutputs.UploadAssets.MaxSizeKB)
- envVars["GH_AW_ASSETS_ALLOWED_EXTS"] = fmt.Sprintf("%q", strings.Join(data.SafeOutputs.UploadAssets.AllowedExts, ","))
- }
+ // Note: Asset upload configuration is not needed here because upload_assets
+ // is now handled as a separate job (see buildUploadAssetsJob)
return envVars
}
From b605a0e1824b5e45e6f3ddd5d46bccb96c11df26 Mon Sep 17 00:00:00 2001
From: Peli de Halleux
Date: Mon, 22 Dec 2025 19:34:21 +0000
Subject: [PATCH 08/11] recompile
---
.github/workflows/audit-workflows.lock.yml | 155 +++++++++++----
.../copilot-pr-nlp-analysis.lock.yml | 154 +++++++++++----
.../copilot-session-insights.lock.yml | 154 +++++++++++----
.../daily-copilot-token-report.lock.yml | 155 +++++++++++----
.github/workflows/daily-file-diet.lock.yml | 180 ++++++++++++++----
.../workflows/daily-firewall-report.lock.yml | 155 +++++++++++----
.../workflows/daily-issues-report.lock.yml | 155 +++++++++++----
.../daily-multi-device-docs-tester.lock.yml | 115 +++++++++--
.github/workflows/daily-news.lock.yml | 155 +++++++++++----
.../daily-performance-summary.lock.yml | 155 +++++++++++----
.../workflows/daily-repo-chronicle.lock.yml | 155 +++++++++++----
.github/workflows/deep-report.lock.yml | 155 +++++++++++----
.github/workflows/docs-noob-tester.lock.yml | 114 +++++++++--
.../github-mcp-structural-analysis.lock.yml | 154 +++++++++++----
.github/workflows/intelligence.lock.yml | 154 +++++++++++----
.github/workflows/org-health-report.lock.yml | 154 +++++++++++----
.github/workflows/portfolio-analyst.lock.yml | 155 +++++++++++----
.github/workflows/python-data-charts.lock.yml | 154 +++++++++++----
.../workflows/stale-repo-identifier.lock.yml | 155 +++++++++++----
.../workflows/technical-doc-writer.lock.yml | 153 +++++++++++----
.github/workflows/unbloat-docs.lock.yml | 153 +++++++++++----
.../workflows/weekly-issue-summary.lock.yml | 155 +++++++++++----
22 files changed, 2659 insertions(+), 685 deletions(-)
diff --git a/.github/workflows/audit-workflows.lock.yml b/.github/workflows/audit-workflows.lock.yml
index 32ca27b985c..1d6f33b3268 100644
--- a/.github/workflows/audit-workflows.lock.yml
+++ b/.github/workflows/audit-workflows.lock.yml
@@ -5619,6 +5619,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -6393,13 +6394,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "claude"
GH_AW_TRACKER_ID: "audit-workflows-daily"
GH_AW_WORKFLOW_ID: "audit-workflows"
@@ -7584,24 +7582,135 @@ jobs:
core.info(`Successfully created ${createdDiscussions.length} discussion(s)`);
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: trending-data-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+ published_count: ${{ steps.upload_assets.outputs.published_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Agentic Workflow Audit Agent"
+ GH_AW_TRACKER_ID: "audit-workflows-daily"
+ GH_AW_ENGINE_ID: "claude"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -7717,25 +7826,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: trending-data-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/copilot-pr-nlp-analysis.lock.yml b/.github/workflows/copilot-pr-nlp-analysis.lock.yml
index 8a4f69736f4..141115850c4 100644
--- a/.github/workflows/copilot-pr-nlp-analysis.lock.yml
+++ b/.github/workflows/copilot-pr-nlp-analysis.lock.yml
@@ -6522,6 +6522,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7287,13 +7288,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_WORKFLOW_ID: "copilot-pr-nlp-analysis"
GH_AW_WORKFLOW_NAME: "Copilot PR Conversation NLP Analysis"
@@ -8477,24 +8475,134 @@ jobs:
core.info(`Successfully created ${createdDiscussions.length} discussion(s)`);
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: copilot-pr-data-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+ published_count: ${{ steps.upload_assets.outputs.published_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Copilot PR Conversation NLP Analysis"
+ GH_AW_ENGINE_ID: "copilot"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -8610,25 +8718,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: copilot-pr-data-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/copilot-session-insights.lock.yml b/.github/workflows/copilot-session-insights.lock.yml
index f5e50df94ed..f8f519683a2 100644
--- a/.github/workflows/copilot-session-insights.lock.yml
+++ b/.github/workflows/copilot-session-insights.lock.yml
@@ -6608,6 +6608,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7379,13 +7380,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "claude"
GH_AW_WORKFLOW_ID: "copilot-session-insights"
GH_AW_WORKFLOW_NAME: "Copilot Session Insights"
@@ -8569,24 +8567,134 @@ jobs:
core.info(`Successfully created ${createdDiscussions.length} discussion(s)`);
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: memory-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+ published_count: ${{ steps.upload_assets.outputs.published_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Copilot Session Insights"
+ GH_AW_ENGINE_ID: "claude"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -8702,25 +8810,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: memory-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/daily-copilot-token-report.lock.yml b/.github/workflows/daily-copilot-token-report.lock.yml
index a26eef0647b..ffec99cb981 100644
--- a/.github/workflows/daily-copilot-token-report.lock.yml
+++ b/.github/workflows/daily-copilot-token-report.lock.yml
@@ -6608,6 +6608,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7376,13 +7377,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_TRACKER_ID: "daily-copilot-token-report"
GH_AW_WORKFLOW_ID: "daily-copilot-token-report"
@@ -8567,24 +8565,135 @@ jobs:
core.info(`Successfully created ${createdDiscussions.length} discussion(s)`);
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: memory-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+ published_count: ${{ steps.upload_assets.outputs.published_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Daily Copilot Token Consumption Report"
+ GH_AW_TRACKER_ID: "daily-copilot-token-report"
+ GH_AW_ENGINE_ID: "copilot"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -8700,25 +8809,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: memory-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/daily-file-diet.lock.yml b/.github/workflows/daily-file-diet.lock.yml
index 76f1370ca6d..5f981aa166b 100644
--- a/.github/workflows/daily-file-diet.lock.yml
+++ b/.github/workflows/daily-file-diet.lock.yml
@@ -6570,6 +6570,7 @@ jobs:
- push_repo_memory
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7757,13 +7758,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
issues: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_TRACKER_ID: "daily-file-diet"
GH_AW_WORKFLOW_ID: "daily-file-diet"
@@ -7793,7 +7791,7 @@ jobs:
owner: ${{ github.repository_owner }}
repositories: ${{ github.event.repository.name }}
github-api-url: ${{ github.api_url }}
- permission-contents: write
+ permission-contents: read
permission-issues: write
- name: Setup JavaScript files
id: setup_scripts
@@ -8733,24 +8731,158 @@ jobs:
(async () => {
await main();
})();
- - name: Upload Assets
+ - name: Invalidate GitHub App token
+ if: always() && steps.app-token.outputs.token != ''
+ env:
+ TOKEN: ${{ steps.app-token.outputs.token }}
+ run: |
+ echo "Revoking GitHub App installation token..."
+ # GitHub CLI will auth with the token being revoked.
+ gh api \
+ --method DELETE \
+ -H "Authorization: token $TOKEN" \
+ /installation/token || echo "Token revoke may already be expired."
+
+ echo "Token invalidation step complete."
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: memory-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+ published_count: ${{ steps.upload_assets.outputs.published_count }}
+ steps:
+ - name: Generate GitHub App token
+ id: app-token
+ uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2.2.1
+ with:
+ app-id: ${{ vars.APP_ID }}
+ private-key: ${{ secrets.APP_PRIVATE_KEY }}
+ owner: ${{ github.repository_owner }}
+ repositories: ${{ github.event.repository.name }}
+ github-api-url: ${{ github.api_url }}
+ permission-contents: write
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Daily File Diet"
+ GH_AW_TRACKER_ID: "daily-file-diet"
+ GH_AW_ENGINE_ID: "copilot"
with:
github-token: ${{ steps.app-token.outputs.token }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -8866,7 +8998,7 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
+ await main();
- name: Invalidate GitHub App token
if: always() && steps.app-token.outputs.token != ''
env:
@@ -8881,23 +9013,3 @@ jobs:
echo "Token invalidation step complete."
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: memory-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
-
diff --git a/.github/workflows/daily-firewall-report.lock.yml b/.github/workflows/daily-firewall-report.lock.yml
index 53371842223..bdd3c5e84de 100644
--- a/.github/workflows/daily-firewall-report.lock.yml
+++ b/.github/workflows/daily-firewall-report.lock.yml
@@ -6098,6 +6098,7 @@ jobs:
- push_repo_memory
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7059,13 +7060,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_TRACKER_ID: "daily-firewall-report"
GH_AW_WORKFLOW_ID: "daily-firewall-report"
@@ -8250,24 +8248,135 @@ jobs:
core.info(`Successfully created ${createdDiscussions.length} discussion(s)`);
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: trending-data-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+ published_count: ${{ steps.upload_assets.outputs.published_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Daily Firewall Logs Collector and Reporter"
+ GH_AW_TRACKER_ID: "daily-firewall-report"
+ GH_AW_ENGINE_ID: "copilot"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -8383,25 +8492,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: trending-data-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/daily-issues-report.lock.yml b/.github/workflows/daily-issues-report.lock.yml
index dc5eb908f8e..950d6bee52d 100644
--- a/.github/workflows/daily-issues-report.lock.yml
+++ b/.github/workflows/daily-issues-report.lock.yml
@@ -6666,6 +6666,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7568,13 +7569,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "codex"
GH_AW_TRACKER_ID: "daily-issues-report"
GH_AW_WORKFLOW_ID: "daily-issues-report"
@@ -9124,24 +9122,135 @@ jobs:
return closedDiscussions;
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: memory-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+ published_count: ${{ steps.upload_assets.outputs.published_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Daily Issues Report Generator"
+ GH_AW_TRACKER_ID: "daily-issues-report"
+ GH_AW_ENGINE_ID: "codex"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -9257,25 +9366,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: memory-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/daily-multi-device-docs-tester.lock.yml b/.github/workflows/daily-multi-device-docs-tester.lock.yml
index fef0723a2e5..65e03f94933 100644
--- a/.github/workflows/daily-multi-device-docs-tester.lock.yml
+++ b/.github/workflows/daily-multi-device-docs-tester.lock.yml
@@ -5446,6 +5446,7 @@ jobs:
- agent
- detection
- safe_outputs
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -6221,13 +6222,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
issues: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "claude"
GH_AW_TRACKER_ID: "daily-multi-device-docs-tester"
GH_AW_WORKFLOW_ID: "daily-multi-device-docs-tester"
@@ -7184,24 +7182,115 @@ jobs:
(async () => {
await main();
})();
- - name: Upload Assets
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+ published_count: ${{ steps.upload_assets.outputs.published_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Multi-Device Docs Tester"
+ GH_AW_TRACKER_ID: "daily-multi-device-docs-tester"
+ GH_AW_ENGINE_ID: "claude"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -7317,5 +7406,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
+ await main();
diff --git a/.github/workflows/daily-news.lock.yml b/.github/workflows/daily-news.lock.yml
index 0a06fc6b13f..d5c51556558 100644
--- a/.github/workflows/daily-news.lock.yml
+++ b/.github/workflows/daily-news.lock.yml
@@ -6413,6 +6413,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7181,13 +7182,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_TRACKER_ID: "daily-news-weekday"
GH_AW_WORKFLOW_ID: "daily-news"
@@ -8372,24 +8370,135 @@ jobs:
core.info(`Successfully created ${createdDiscussions.length} discussion(s)`);
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: memory-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+ published_count: ${{ steps.upload_assets.outputs.published_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Daily News"
+ GH_AW_TRACKER_ID: "daily-news-weekday"
+ GH_AW_ENGINE_ID: "copilot"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -8505,25 +8614,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: memory-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/daily-performance-summary.lock.yml b/.github/workflows/daily-performance-summary.lock.yml
index b82cd4a363d..72474347bc4 100644
--- a/.github/workflows/daily-performance-summary.lock.yml
+++ b/.github/workflows/daily-performance-summary.lock.yml
@@ -7886,6 +7886,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -8639,13 +8640,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "codex"
GH_AW_TRACKER_ID: "daily-performance-summary"
GH_AW_WORKFLOW_ID: "daily-performance-summary"
@@ -10195,24 +10193,135 @@ jobs:
return closedDiscussions;
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: trending-data-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+ published_count: ${{ steps.upload_assets.outputs.published_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Daily Project Performance Summary Generator (Using Safe Inputs)"
+ GH_AW_TRACKER_ID: "daily-performance-summary"
+ GH_AW_ENGINE_ID: "codex"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -10328,25 +10437,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: trending-data-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/daily-repo-chronicle.lock.yml b/.github/workflows/daily-repo-chronicle.lock.yml
index 93745a445ff..a039d7308ff 100644
--- a/.github/workflows/daily-repo-chronicle.lock.yml
+++ b/.github/workflows/daily-repo-chronicle.lock.yml
@@ -6334,6 +6334,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7102,13 +7103,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_TRACKER_ID: "daily-repo-chronicle"
GH_AW_WORKFLOW_ID: "daily-repo-chronicle"
@@ -8293,24 +8291,135 @@ jobs:
core.info(`Successfully created ${createdDiscussions.length} discussion(s)`);
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: memory-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+ published_count: ${{ steps.upload_assets.outputs.published_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "The Daily Repository Chronicle"
+ GH_AW_TRACKER_ID: "daily-repo-chronicle"
+ GH_AW_ENGINE_ID: "copilot"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -8426,25 +8535,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: memory-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/deep-report.lock.yml b/.github/workflows/deep-report.lock.yml
index 80c142f9e1e..a45f9a66dc1 100644
--- a/.github/workflows/deep-report.lock.yml
+++ b/.github/workflows/deep-report.lock.yml
@@ -6154,6 +6154,7 @@ jobs:
- push_repo_memory
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7101,13 +7102,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "codex"
GH_AW_TRACKER_ID: "deep-report-intel-agent"
GH_AW_WORKFLOW_ID: "deep-report"
@@ -8292,24 +8290,135 @@ jobs:
core.info(`Successfully created ${createdDiscussions.length} discussion(s)`);
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: weekly-issues-data-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+ published_count: ${{ steps.upload_assets.outputs.published_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "DeepReport - Intelligence Gathering Agent"
+ GH_AW_TRACKER_ID: "deep-report-intel-agent"
+ GH_AW_ENGINE_ID: "codex"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -8425,25 +8534,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: weekly-issues-data-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/docs-noob-tester.lock.yml b/.github/workflows/docs-noob-tester.lock.yml
index 6ca60ee894b..318db1754ad 100644
--- a/.github/workflows/docs-noob-tester.lock.yml
+++ b/.github/workflows/docs-noob-tester.lock.yml
@@ -5805,6 +5805,7 @@ jobs:
- agent
- detection
- safe_outputs
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -6570,13 +6571,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_WORKFLOW_ID: "docs-noob-tester"
GH_AW_WORKFLOW_NAME: "Documentation Noob Tester"
@@ -7760,24 +7758,114 @@ jobs:
core.info(`Successfully created ${createdDiscussions.length} discussion(s)`);
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+ published_count: ${{ steps.upload_assets.outputs.published_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Documentation Noob Tester"
+ GH_AW_ENGINE_ID: "copilot"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -7893,5 +7981,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
+ await main();
diff --git a/.github/workflows/github-mcp-structural-analysis.lock.yml b/.github/workflows/github-mcp-structural-analysis.lock.yml
index 5220e97ed13..aa6f3f08b0b 100644
--- a/.github/workflows/github-mcp-structural-analysis.lock.yml
+++ b/.github/workflows/github-mcp-structural-analysis.lock.yml
@@ -5961,6 +5961,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -6732,13 +6733,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "claude"
GH_AW_WORKFLOW_ID: "github-mcp-structural-analysis"
GH_AW_WORKFLOW_NAME: "GitHub MCP Structural Analysis"
@@ -7922,24 +7920,134 @@ jobs:
core.info(`Successfully created ${createdDiscussions.length} discussion(s)`);
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: memory-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+ published_count: ${{ steps.upload_assets.outputs.published_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "GitHub MCP Structural Analysis"
+ GH_AW_ENGINE_ID: "claude"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -8055,25 +8163,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: memory-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/intelligence.lock.yml b/.github/workflows/intelligence.lock.yml
index c7ba61c5764..d9c69fac648 100644
--- a/.github/workflows/intelligence.lock.yml
+++ b/.github/workflows/intelligence.lock.yml
@@ -6802,6 +6802,7 @@ jobs:
- push_repo_memory
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7761,13 +7762,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
issues: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_WORKFLOW_ID: "intelligence"
GH_AW_WORKFLOW_NAME: "Campaign Intelligence System"
@@ -8723,24 +8721,134 @@ jobs:
(async () => {
await main();
})();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: memory-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+ published_count: ${{ steps.upload_assets.outputs.published_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Campaign Intelligence System"
+ GH_AW_ENGINE_ID: "copilot"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -8856,25 +8964,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: memory-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/org-health-report.lock.yml b/.github/workflows/org-health-report.lock.yml
index 3da9b0eb741..fca2ba551b5 100644
--- a/.github/workflows/org-health-report.lock.yml
+++ b/.github/workflows/org-health-report.lock.yml
@@ -6448,6 +6448,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7213,13 +7214,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_WORKFLOW_ID: "org-health-report"
GH_AW_WORKFLOW_NAME: "Organization Health Report"
@@ -8403,24 +8401,134 @@ jobs:
core.info(`Successfully created ${createdDiscussions.length} discussion(s)`);
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: memory-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+ published_count: ${{ steps.upload_assets.outputs.published_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Organization Health Report"
+ GH_AW_ENGINE_ID: "copilot"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -8536,25 +8644,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: memory-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/portfolio-analyst.lock.yml b/.github/workflows/portfolio-analyst.lock.yml
index d5fe0f145ef..4a45d114d8a 100644
--- a/.github/workflows/portfolio-analyst.lock.yml
+++ b/.github/workflows/portfolio-analyst.lock.yml
@@ -6439,6 +6439,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7207,13 +7208,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_TRACKER_ID: "portfolio-analyst-weekly"
GH_AW_WORKFLOW_ID: "portfolio-analyst"
@@ -8398,24 +8396,135 @@ jobs:
core.info(`Successfully created ${createdDiscussions.length} discussion(s)`);
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: trending-data-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+ published_count: ${{ steps.upload_assets.outputs.published_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Automated Portfolio Analyst"
+ GH_AW_TRACKER_ID: "portfolio-analyst-weekly"
+ GH_AW_ENGINE_ID: "copilot"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -8531,25 +8640,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: trending-data-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/python-data-charts.lock.yml b/.github/workflows/python-data-charts.lock.yml
index 9fa59df5dfd..93b00ade7a6 100644
--- a/.github/workflows/python-data-charts.lock.yml
+++ b/.github/workflows/python-data-charts.lock.yml
@@ -6740,6 +6740,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7505,13 +7506,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_WORKFLOW_ID: "python-data-charts"
GH_AW_WORKFLOW_NAME: "Python Data Visualization Generator"
@@ -8695,24 +8693,134 @@ jobs:
core.info(`Successfully created ${createdDiscussions.length} discussion(s)`);
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: memory-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+ published_count: ${{ steps.upload_assets.outputs.published_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Python Data Visualization Generator"
+ GH_AW_ENGINE_ID: "copilot"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -8828,25 +8936,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: memory-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/stale-repo-identifier.lock.yml b/.github/workflows/stale-repo-identifier.lock.yml
index fbbdc56a2c9..c856bdf6911 100644
--- a/.github/workflows/stale-repo-identifier.lock.yml
+++ b/.github/workflows/stale-repo-identifier.lock.yml
@@ -6474,6 +6474,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7240,13 +7241,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
issues: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 🔍 *Analysis by [{workflow_name}]({run_url})*\",\"runStarted\":\"🔍 Stale Repository Identifier starting! [{workflow_name}]({run_url}) is analyzing repository activity...\",\"runSuccess\":\"✅ Analysis complete! [{workflow_name}]({run_url}) has finished analyzing stale repositories.\",\"runFailure\":\"⚠️ Analysis interrupted! [{workflow_name}]({run_url}) {status}.\"}"
GH_AW_WORKFLOW_ID: "stale-repo-identifier"
@@ -8205,24 +8203,135 @@ jobs:
(async () => {
await main();
})();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: trending-data-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+ published_count: ${{ steps.upload_assets.outputs.published_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Stale Repository Identifier"
+ GH_AW_ENGINE_ID: "copilot"
+ GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 🔍 *Analysis by [{workflow_name}]({run_url})*\",\"runStarted\":\"🔍 Stale Repository Identifier starting! [{workflow_name}]({run_url}) is analyzing repository activity...\",\"runSuccess\":\"✅ Analysis complete! [{workflow_name}]({run_url}) has finished analyzing stale repositories.\",\"runFailure\":\"⚠️ Analysis interrupted! [{workflow_name}]({run_url}) {status}.\"}"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -8338,25 +8447,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: trending-data-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/technical-doc-writer.lock.yml b/.github/workflows/technical-doc-writer.lock.yml
index 6498a0cd600..d3f41652b62 100644
--- a/.github/workflows/technical-doc-writer.lock.yml
+++ b/.github/workflows/technical-doc-writer.lock.yml
@@ -6144,6 +6144,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -6917,9 +6918,6 @@ jobs:
pull-requests: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 📝 *Documentation by [{workflow_name}]({run_url})*\",\"runStarted\":\"✍️ The Technical Writer begins! [{workflow_name}]({run_url}) is documenting this {event_type}...\",\"runSuccess\":\"📝 Documentation complete! [{workflow_name}]({run_url}) has written the docs. Clear as crystal! ✨\",\"runFailure\":\"✍️ Writer's block! [{workflow_name}]({run_url}) {status}. The page remains blank...\"}"
GH_AW_WORKFLOW_ID: "technical-doc-writer"
@@ -8727,24 +8725,135 @@ jobs:
return createdComments;
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: memory-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+ published_count: ${{ steps.upload_assets.outputs.published_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Technical Doc Writer"
+ GH_AW_ENGINE_ID: "copilot"
+ GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 📝 *Documentation by [{workflow_name}]({run_url})*\",\"runStarted\":\"✍️ The Technical Writer begins! [{workflow_name}]({run_url}) is documenting this {event_type}...\",\"runSuccess\":\"📝 Documentation complete! [{workflow_name}]({run_url}) has written the docs. Clear as crystal! ✨\",\"runFailure\":\"✍️ Writer's block! [{workflow_name}]({run_url}) {status}. The page remains blank...\"}"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -8860,25 +8969,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: memory-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/unbloat-docs.lock.yml b/.github/workflows/unbloat-docs.lock.yml
index 616534bf089..e680ef34f30 100644
--- a/.github/workflows/unbloat-docs.lock.yml
+++ b/.github/workflows/unbloat-docs.lock.yml
@@ -6137,6 +6137,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7122,9 +7123,6 @@ jobs:
pull-requests: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "claude"
GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 🗜️ *Compressed by [{workflow_name}]({run_url})*\",\"runStarted\":\"📦 Time to slim down! [{workflow_name}]({run_url}) is trimming the excess from this {event_type}...\",\"runSuccess\":\"🗜️ Docs on a diet! [{workflow_name}]({run_url}) has removed the bloat. Lean and mean! 💪\",\"runFailure\":\"📦 Unbloating paused! [{workflow_name}]({run_url}) {status}. The docs remain... fluffy.\"}"
GH_AW_WORKFLOW_ID: "unbloat-docs"
@@ -8934,24 +8932,135 @@ jobs:
return createdComments;
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: memory-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+ published_count: ${{ steps.upload_assets.outputs.published_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Documentation Unbloat"
+ GH_AW_ENGINE_ID: "claude"
+ GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 🗜️ *Compressed by [{workflow_name}]({run_url})*\",\"runStarted\":\"📦 Time to slim down! [{workflow_name}]({run_url}) is trimming the excess from this {event_type}...\",\"runSuccess\":\"🗜️ Docs on a diet! [{workflow_name}]({run_url}) has removed the bloat. Lean and mean! 💪\",\"runFailure\":\"📦 Unbloating paused! [{workflow_name}]({run_url}) {status}. The docs remain... fluffy.\"}"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -9067,25 +9176,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: memory-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/weekly-issue-summary.lock.yml b/.github/workflows/weekly-issue-summary.lock.yml
index 01766b3bc63..3e0fd6664c6 100644
--- a/.github/workflows/weekly-issue-summary.lock.yml
+++ b/.github/workflows/weekly-issue-summary.lock.yml
@@ -6242,6 +6242,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7010,13 +7011,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_TRACKER_ID: "weekly-issue-summary"
GH_AW_WORKFLOW_ID: "weekly-issue-summary"
@@ -8201,24 +8199,135 @@ jobs:
core.info(`Successfully created ${createdDiscussions.length} discussion(s)`);
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: memory-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+ published_count: ${{ steps.upload_assets.outputs.published_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Weekly Issue Summary"
+ GH_AW_TRACKER_ID: "weekly-issue-summary"
+ GH_AW_ENGINE_ID: "copilot"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -8334,25 +8443,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: memory-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
From c15ef44cbc257aeb27b18855834847ceeac52928 Mon Sep 17 00:00:00 2001
From: Peli de Halleux
Date: Mon, 22 Dec 2025 19:35:46 +0000
Subject: [PATCH 09/11] recompile
---
.github/workflows/ai-moderator.lock.yml | 2 +-
.github/workflows/daily-code-metrics.lock.yml | 4 ++--
.github/workflows/dev-hawk.lock.yml | 2 +-
.github/workflows/firewall-escape.lock.yml | 2 +-
.github/workflows/go-pattern-detector.lock.yml | 2 +-
.github/workflows/issue-classifier.lock.yml | 2 +-
.github/workflows/issue-monster.lock.yml | 2 +-
.github/workflows/mcp-inspector.lock.yml | 4 ++--
.github/workflows/notion-issue-summary.lock.yml | 2 +-
.../workflows/prompt-clustering-analysis.lock.yml | 6 +++---
.github/workflows/release.lock.yml | 14 +++++++-------
.github/workflows/safe-output-health.lock.yml | 2 +-
.github/workflows/slide-deck-maintainer.lock.yml | 2 +-
.../workflows/smoke-copilot-playwright.lock.yml | 2 +-
.github/workflows/smoke-detector.lock.yml | 2 +-
.github/workflows/static-analysis-report.lock.yml | 2 +-
.github/workflows/super-linter.lock.yml | 8 ++++----
.github/workflows/tidy.lock.yml | 4 ++--
18 files changed, 32 insertions(+), 32 deletions(-)
diff --git a/.github/workflows/ai-moderator.lock.yml b/.github/workflows/ai-moderator.lock.yml
index 2d9091afdce..aa1d6246acd 100644
--- a/.github/workflows/ai-moderator.lock.yml
+++ b/.github/workflows/ai-moderator.lock.yml
@@ -5804,7 +5804,7 @@ jobs:
- name: Check if actor is external user or GitHub Action bot
id: check_actor
if: ${{ github.event_name != 'workflow_dispatch' }}
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |-
const actor = context.actor;
diff --git a/.github/workflows/daily-code-metrics.lock.yml b/.github/workflows/daily-code-metrics.lock.yml
index fb74a741c6f..8f543dc3754 100644
--- a/.github/workflows/daily-code-metrics.lock.yml
+++ b/.github/workflows/daily-code-metrics.lock.yml
@@ -170,7 +170,7 @@ jobs:
pip install --user --quiet numpy pandas matplotlib seaborn scipy
- if: always()
name: Upload charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
with:
if-no-files-found: warn
name: trending-charts
@@ -178,7 +178,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
with:
if-no-files-found: warn
name: trending-source-and-data
diff --git a/.github/workflows/dev-hawk.lock.yml b/.github/workflows/dev-hawk.lock.yml
index acfe30b43ef..4d5f09b6fac 100644
--- a/.github/workflows/dev-hawk.lock.yml
+++ b/.github/workflows/dev-hawk.lock.yml
@@ -174,7 +174,7 @@ jobs:
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
+ uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
with:
cache: true
go-version-file: go.mod
diff --git a/.github/workflows/firewall-escape.lock.yml b/.github/workflows/firewall-escape.lock.yml
index 83c3e846026..85fe48f5989 100644
--- a/.github/workflows/firewall-escape.lock.yml
+++ b/.github/workflows/firewall-escape.lock.yml
@@ -2978,7 +2978,7 @@ jobs:
steps:
- name: Create issue on test failure
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |
await github.rest.issues.create({
diff --git a/.github/workflows/go-pattern-detector.lock.yml b/.github/workflows/go-pattern-detector.lock.yml
index cac892c4463..ceb89fffa31 100644
--- a/.github/workflows/go-pattern-detector.lock.yml
+++ b/.github/workflows/go-pattern-detector.lock.yml
@@ -5384,7 +5384,7 @@ jobs:
found_patterns: ${{ steps.detect.outputs.found_patterns }}
steps:
- name: Checkout repository
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
with:
persist-credentials: false
- name: Install ast-grep
diff --git a/.github/workflows/issue-classifier.lock.yml b/.github/workflows/issue-classifier.lock.yml
index ddfc3f26710..733eb8392fa 100644
--- a/.github/workflows/issue-classifier.lock.yml
+++ b/.github/workflows/issue-classifier.lock.yml
@@ -2999,7 +2999,7 @@ jobs:
path: /tmp/gh-aw/aw_info.json
if-no-files-found: warn
- name: Run AI Inference
- uses: actions/ai-inference@334892bb203895caaed82ec52d23c1ed9385151e # v2.0.4
+ uses: actions/ai-inference@334892bb203895caaed82ec52d23c1ed9385151e # v1
env:
GH_AW_MCP_CONFIG: /tmp/gh-aw/mcp-config/mcp-servers.json
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
diff --git a/.github/workflows/issue-monster.lock.yml b/.github/workflows/issue-monster.lock.yml
index 727a77b836b..5183490d4ff 100644
--- a/.github/workflows/issue-monster.lock.yml
+++ b/.github/workflows/issue-monster.lock.yml
@@ -8453,7 +8453,7 @@ jobs:
steps:
- name: Search for candidate issues
id: search
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
with:
script: |
const { owner, repo } = context.repo;
diff --git a/.github/workflows/mcp-inspector.lock.yml b/.github/workflows/mcp-inspector.lock.yml
index ed6dc31f959..aa22c34ae09 100644
--- a/.github/workflows/mcp-inspector.lock.yml
+++ b/.github/workflows/mcp-inspector.lock.yml
@@ -6948,7 +6948,7 @@ jobs:
find "/tmp/gh-aw/safe-jobs/" -type f -print
echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safe-jobs/agent_output.json" >> "$GITHUB_ENV"
- name: Add comment to Notion page
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
NOTION_API_TOKEN: ${{ secrets.NOTION_API_TOKEN }}
NOTION_PAGE_ID: ${{ vars.NOTION_PAGE_ID }}
@@ -7078,7 +7078,7 @@ jobs:
find "/tmp/gh-aw/safe-jobs/" -type f -print
echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safe-jobs/agent_output.json" >> "$GITHUB_ENV"
- name: Post message to Slack
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
SLACK_CHANNEL_ID: ${{ env.GH_AW_SLACK_CHANNEL_ID }}
diff --git a/.github/workflows/notion-issue-summary.lock.yml b/.github/workflows/notion-issue-summary.lock.yml
index 3f32f259173..25267cf121d 100644
--- a/.github/workflows/notion-issue-summary.lock.yml
+++ b/.github/workflows/notion-issue-summary.lock.yml
@@ -6344,7 +6344,7 @@ jobs:
find "/tmp/gh-aw/safe-jobs/" -type f -print
echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safe-jobs/agent_output.json" >> "$GITHUB_ENV"
- name: Add comment to Notion page
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
NOTION_API_TOKEN: ${{ secrets.NOTION_API_TOKEN }}
NOTION_PAGE_ID: ${{ vars.NOTION_PAGE_ID }}
diff --git a/.github/workflows/prompt-clustering-analysis.lock.yml b/.github/workflows/prompt-clustering-analysis.lock.yml
index 51160fca69d..418d52a1406 100644
--- a/.github/workflows/prompt-clustering-analysis.lock.yml
+++ b/.github/workflows/prompt-clustering-analysis.lock.yml
@@ -173,7 +173,7 @@ jobs:
- name: Set up jq utilities directory
run: "mkdir -p /tmp/gh-aw\ncat > /tmp/gh-aw/jqschema.sh << 'EOF'\n#!/usr/bin/env bash\n# jqschema.sh\njq -c '\ndef walk(f):\n . as $in |\n if type == \"object\" then\n reduce keys[] as $k ({}; . + {($k): ($in[$k] | walk(f))})\n elif type == \"array\" then\n if length == 0 then [] else [.[0] | walk(f)] end\n else\n type\n end;\nwalk(.)\n'\nEOF\nchmod +x /tmp/gh-aw/jqschema.sh"
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
+ uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
with:
cache: true
go-version-file: go.mod
@@ -198,7 +198,7 @@ jobs:
pip install --user --quiet numpy pandas matplotlib seaborn scipy
- if: always()
name: Upload charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
with:
if-no-files-found: warn
name: trending-charts
@@ -206,7 +206,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
with:
if-no-files-found: warn
name: trending-source-and-data
diff --git a/.github/workflows/release.lock.yml b/.github/workflows/release.lock.yml
index b6f5612fe73..77821c8ff36 100644
--- a/.github/workflows/release.lock.yml
+++ b/.github/workflows/release.lock.yml
@@ -6493,28 +6493,28 @@ jobs:
steps:
- name: Checkout repository
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
+ uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
with:
cache: false
go-version-file: go.mod
- name: Download Go modules
run: go mod download
- name: Generate SBOM (SPDX format)
- uses: anchore/sbom-action@43a17d6e7add2b5535efe4dcae9952337c479a93 # v0.20.11
+ uses: anchore/sbom-action@43a17d6e7add2b5535efe4dcae9952337c479a93 # v0.20.10
with:
artifact-name: sbom.spdx.json
format: spdx-json
output-file: sbom.spdx.json
- name: Generate SBOM (CycloneDX format)
- uses: anchore/sbom-action@43a17d6e7add2b5535efe4dcae9952337c479a93 # v0.20.11
+ uses: anchore/sbom-action@43a17d6e7add2b5535efe4dcae9952337c479a93 # v0.20.10
with:
artifact-name: sbom.cdx.json
format: cyclonedx-json
output-file: sbom.cdx.json
- name: Upload SBOM artifacts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
with:
name: sbom-artifacts
path: |
@@ -6693,12 +6693,12 @@ jobs:
release_tag: ${{ steps.get_release.outputs.release_tag }}
steps:
- name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
with:
fetch-depth: 0
persist-credentials: false
- name: Release with gh-extension-precompile
- uses: cli/gh-extension-precompile@9e2237c30f869ad3bcaed6a4be2cd43564dd421b # v2.1.0
+ uses: cli/gh-extension-precompile@9e2237c30f869ad3bcaed6a4be2cd43564dd421b # v2
with:
build_script_override: scripts/build-release.sh
go_version_file: go.mod
diff --git a/.github/workflows/safe-output-health.lock.yml b/.github/workflows/safe-output-health.lock.yml
index ec5fc66f80d..7524b78a10c 100644
--- a/.github/workflows/safe-output-health.lock.yml
+++ b/.github/workflows/safe-output-health.lock.yml
@@ -167,7 +167,7 @@ jobs:
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
+ uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
with:
cache: true
go-version-file: go.mod
diff --git a/.github/workflows/slide-deck-maintainer.lock.yml b/.github/workflows/slide-deck-maintainer.lock.yml
index 0a16d1df50b..90ff91eaff8 100644
--- a/.github/workflows/slide-deck-maintainer.lock.yml
+++ b/.github/workflows/slide-deck-maintainer.lock.yml
@@ -167,7 +167,7 @@ jobs:
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
- name: Set up Node.js
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
+ uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
with:
cache: npm
cache-dependency-path: docs/package-lock.json
diff --git a/.github/workflows/smoke-copilot-playwright.lock.yml b/.github/workflows/smoke-copilot-playwright.lock.yml
index 86727061a12..3753ea24ea1 100644
--- a/.github/workflows/smoke-copilot-playwright.lock.yml
+++ b/.github/workflows/smoke-copilot-playwright.lock.yml
@@ -7628,7 +7628,7 @@ jobs:
run: "echo \"📋 Collecting Playwright MCP logs...\"\n\n# Create logs directory\nmkdir -p /tmp/gh-aw/playwright-debug-logs\n\n# Copy any playwright logs from the MCP logs directory\nif [ -d \"/tmp/gh-aw/mcp-logs/playwright\" ]; then\n echo \"Found Playwright MCP logs directory\"\n cp -r /tmp/gh-aw/mcp-logs/playwright/* /tmp/gh-aw/playwright-debug-logs/ 2>/dev/null || true\n ls -la /tmp/gh-aw/playwright-debug-logs/\nelse\n echo \"No Playwright MCP logs directory found at /tmp/gh-aw/mcp-logs/playwright\"\nfi\n\n# List all trace files if any\necho \"Looking for trace files...\"\nfind /tmp -name \"*.zip\" -o -name \"trace*\" 2>/dev/null | head -20 || true\n\n# Show docker container logs if any containers are still running\necho \"Checking for running Docker containers...\"\ndocker ps -a --format \"table {{.Names}}\\t{{.Status}}\\t{{.Image}}\" 2>/dev/null || true\n"
- if: always()
name: Upload Playwright Debug Logs
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
with:
if-no-files-found: ignore
name: playwright-debug-logs-${{ github.run_id }}
diff --git a/.github/workflows/smoke-detector.lock.yml b/.github/workflows/smoke-detector.lock.yml
index 6ddb3c587fd..074c6beb81b 100644
--- a/.github/workflows/smoke-detector.lock.yml
+++ b/.github/workflows/smoke-detector.lock.yml
@@ -595,7 +595,7 @@ jobs:
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
+ uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
with:
cache: true
go-version-file: go.mod
diff --git a/.github/workflows/static-analysis-report.lock.yml b/.github/workflows/static-analysis-report.lock.yml
index d535c09e308..3b0575cce57 100644
--- a/.github/workflows/static-analysis-report.lock.yml
+++ b/.github/workflows/static-analysis-report.lock.yml
@@ -166,7 +166,7 @@ jobs:
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
+ uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
with:
cache: true
go-version-file: go.mod
diff --git a/.github/workflows/super-linter.lock.yml b/.github/workflows/super-linter.lock.yml
index 3f60c9a5651..0dd91e497f4 100644
--- a/.github/workflows/super-linter.lock.yml
+++ b/.github/workflows/super-linter.lock.yml
@@ -166,7 +166,7 @@ jobs:
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
- name: Download super-linter log
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6
with:
name: super-linter-log
path: /tmp/gh-aw/
@@ -7540,13 +7540,13 @@ jobs:
steps:
- name: Checkout Code
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
with:
fetch-depth: 0
persist-credentials: false
- name: Super-linter
id: super-linter
- uses: super-linter/super-linter@47984f49b4e87383eed97890fe2dca6063bbd9c3 # v8.3.1
+ uses: super-linter/super-linter@47984f49b4e87383eed97890fe2dca6063bbd9c3 # v8.2.1
env:
CREATE_LOG_FILE: "true"
DEFAULT_BRANCH: main
@@ -7568,7 +7568,7 @@ jobs:
fi
- name: Upload super-linter log
if: always()
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
with:
name: super-linter-log
path: super-linter.log
diff --git a/.github/workflows/tidy.lock.yml b/.github/workflows/tidy.lock.yml
index 541228a7036..dc569efd4f9 100644
--- a/.github/workflows/tidy.lock.yml
+++ b/.github/workflows/tidy.lock.yml
@@ -577,13 +577,13 @@ jobs:
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
- name: Set up Node.js
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
+ uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
with:
cache: npm
cache-dependency-path: pkg/workflow/js/package-lock.json
node-version: "24"
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
+ uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
with:
cache: true
go-version-file: go.mod
From c3d658c6d51cb04f6c179f833e68f6e07c994e0b Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Mon, 22 Dec 2025 19:58:45 +0000
Subject: [PATCH 10/11] Fix linting and update dev.md for Python graph upload
test
Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com>
---
.github/workflows/ai-moderator.lock.yml | 2 +-
.github/workflows/audit-workflows.lock.yml | 6 +-
.../copilot-pr-nlp-analysis.lock.yml | 4 +-
.../copilot-session-insights.lock.yml | 4 +-
.github/workflows/daily-code-metrics.lock.yml | 4 +-
.../daily-copilot-token-report.lock.yml | 4 +-
.github/workflows/daily-file-diet.lock.yml | 4 +-
.../workflows/daily-firewall-report.lock.yml | 6 +-
.../workflows/daily-issues-report.lock.yml | 4 +-
.github/workflows/daily-news.lock.yml | 4 +-
.../daily-performance-summary.lock.yml | 4 +-
.../workflows/daily-repo-chronicle.lock.yml | 4 +-
.github/workflows/deep-report.lock.yml | 2 +-
.github/workflows/dev-hawk.lock.yml | 2 +-
.github/workflows/dev.lock.yml | 649 +++++++++++++++++-
.github/workflows/dev.md | 34 +-
.github/workflows/firewall-escape.lock.yml | 2 +-
.../github-mcp-structural-analysis.lock.yml | 4 +-
.../workflows/go-pattern-detector.lock.yml | 2 +-
.github/workflows/intelligence.lock.yml | 4 +-
.github/workflows/issue-monster.lock.yml | 2 +-
.github/workflows/mcp-inspector.lock.yml | 4 +-
.../workflows/notion-issue-summary.lock.yml | 2 +-
.github/workflows/org-health-report.lock.yml | 4 +-
.github/workflows/portfolio-analyst.lock.yml | 6 +-
.../prompt-clustering-analysis.lock.yml | 6 +-
.github/workflows/python-data-charts.lock.yml | 4 +-
.github/workflows/release.lock.yml | 8 +-
.github/workflows/safe-output-health.lock.yml | 2 +-
.../workflows/slide-deck-maintainer.lock.yml | 2 +-
.../smoke-copilot-playwright.lock.yml | 2 +-
.github/workflows/smoke-detector.lock.yml | 2 +-
.../workflows/stale-repo-identifier.lock.yml | 8 +-
.../workflows/static-analysis-report.lock.yml | 2 +-
.github/workflows/super-linter.lock.yml | 6 +-
.github/workflows/tidy.lock.yml | 4 +-
.../workflows/weekly-issue-summary.lock.yml | 4 +-
pkg/workflow/compiler_safe_outputs_shared.go | 20 -
38 files changed, 711 insertions(+), 126 deletions(-)
diff --git a/.github/workflows/ai-moderator.lock.yml b/.github/workflows/ai-moderator.lock.yml
index aa1d6246acd..2d9091afdce 100644
--- a/.github/workflows/ai-moderator.lock.yml
+++ b/.github/workflows/ai-moderator.lock.yml
@@ -5804,7 +5804,7 @@ jobs:
- name: Check if actor is external user or GitHub Action bot
id: check_actor
if: ${{ github.event_name != 'workflow_dispatch' }}
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
with:
script: |-
const actor = context.actor;
diff --git a/.github/workflows/audit-workflows.lock.yml b/.github/workflows/audit-workflows.lock.yml
index ee05e2402d1..89d6836261b 100644
--- a/.github/workflows/audit-workflows.lock.yml
+++ b/.github/workflows/audit-workflows.lock.yml
@@ -171,7 +171,7 @@ jobs:
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
+ uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
with:
cache: true
go-version-file: go.mod
@@ -193,7 +193,7 @@ jobs:
pip install --user --quiet numpy pandas matplotlib seaborn scipy
- if: always()
name: Upload charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: trending-charts
@@ -201,7 +201,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: trending-source-and-data
diff --git a/.github/workflows/copilot-pr-nlp-analysis.lock.yml b/.github/workflows/copilot-pr-nlp-analysis.lock.yml
index 102e9752d52..bf4816186cf 100644
--- a/.github/workflows/copilot-pr-nlp-analysis.lock.yml
+++ b/.github/workflows/copilot-pr-nlp-analysis.lock.yml
@@ -177,7 +177,7 @@ jobs:
run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n"
- if: always()
name: Upload generated charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: data-charts
@@ -185,7 +185,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source files and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: python-source-and-data
diff --git a/.github/workflows/copilot-session-insights.lock.yml b/.github/workflows/copilot-session-insights.lock.yml
index 75cfd6d4e64..ace2268b63e 100644
--- a/.github/workflows/copilot-session-insights.lock.yml
+++ b/.github/workflows/copilot-session-insights.lock.yml
@@ -184,7 +184,7 @@ jobs:
run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n"
- if: always()
name: Upload generated charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: data-charts
@@ -192,7 +192,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source files and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: python-source-and-data
diff --git a/.github/workflows/daily-code-metrics.lock.yml b/.github/workflows/daily-code-metrics.lock.yml
index 8f543dc3754..fb74a741c6f 100644
--- a/.github/workflows/daily-code-metrics.lock.yml
+++ b/.github/workflows/daily-code-metrics.lock.yml
@@ -170,7 +170,7 @@ jobs:
pip install --user --quiet numpy pandas matplotlib seaborn scipy
- if: always()
name: Upload charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: trending-charts
@@ -178,7 +178,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: trending-source-and-data
diff --git a/.github/workflows/daily-copilot-token-report.lock.yml b/.github/workflows/daily-copilot-token-report.lock.yml
index ea3e3d2c3a3..31fbeab4bdc 100644
--- a/.github/workflows/daily-copilot-token-report.lock.yml
+++ b/.github/workflows/daily-copilot-token-report.lock.yml
@@ -173,7 +173,7 @@ jobs:
run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n"
- if: always()
name: Upload generated charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: data-charts
@@ -181,7 +181,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source files and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: python-source-and-data
diff --git a/.github/workflows/daily-file-diet.lock.yml b/.github/workflows/daily-file-diet.lock.yml
index 5f6fe287807..df790e2de14 100644
--- a/.github/workflows/daily-file-diet.lock.yml
+++ b/.github/workflows/daily-file-diet.lock.yml
@@ -189,7 +189,7 @@ jobs:
run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n"
- if: always()
name: Upload generated charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: data-charts
@@ -197,7 +197,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source files and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: python-source-and-data
diff --git a/.github/workflows/daily-firewall-report.lock.yml b/.github/workflows/daily-firewall-report.lock.yml
index 4c340787878..85760a9a6b6 100644
--- a/.github/workflows/daily-firewall-report.lock.yml
+++ b/.github/workflows/daily-firewall-report.lock.yml
@@ -170,7 +170,7 @@ jobs:
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
+ uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
with:
cache: true
go-version-file: go.mod
@@ -190,7 +190,7 @@ jobs:
pip install --user --quiet numpy pandas matplotlib seaborn scipy
- if: always()
name: Upload charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: trending-charts
@@ -198,7 +198,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: trending-source-and-data
diff --git a/.github/workflows/daily-issues-report.lock.yml b/.github/workflows/daily-issues-report.lock.yml
index 99f37dea804..5234acf4540 100644
--- a/.github/workflows/daily-issues-report.lock.yml
+++ b/.github/workflows/daily-issues-report.lock.yml
@@ -187,7 +187,7 @@ jobs:
run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n"
- if: always()
name: Upload generated charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: data-charts
@@ -195,7 +195,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source files and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: python-source-and-data
diff --git a/.github/workflows/daily-news.lock.yml b/.github/workflows/daily-news.lock.yml
index 420c7b9d32f..04cc50bb64b 100644
--- a/.github/workflows/daily-news.lock.yml
+++ b/.github/workflows/daily-news.lock.yml
@@ -179,7 +179,7 @@ jobs:
run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n"
- if: always()
name: Upload generated charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: data-charts
@@ -187,7 +187,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source files and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: python-source-and-data
diff --git a/.github/workflows/daily-performance-summary.lock.yml b/.github/workflows/daily-performance-summary.lock.yml
index f0c3568f081..4dfb57dfd70 100644
--- a/.github/workflows/daily-performance-summary.lock.yml
+++ b/.github/workflows/daily-performance-summary.lock.yml
@@ -176,7 +176,7 @@ jobs:
pip install --user --quiet numpy pandas matplotlib seaborn scipy
- if: always()
name: Upload charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: trending-charts
@@ -184,7 +184,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: trending-source-and-data
diff --git a/.github/workflows/daily-repo-chronicle.lock.yml b/.github/workflows/daily-repo-chronicle.lock.yml
index 8b8e0c77786..2c47c0d0213 100644
--- a/.github/workflows/daily-repo-chronicle.lock.yml
+++ b/.github/workflows/daily-repo-chronicle.lock.yml
@@ -174,7 +174,7 @@ jobs:
run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n"
- if: always()
name: Upload generated charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: data-charts
@@ -182,7 +182,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source files and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: python-source-and-data
diff --git a/.github/workflows/deep-report.lock.yml b/.github/workflows/deep-report.lock.yml
index 7174dd4a1fb..36ec4546456 100644
--- a/.github/workflows/deep-report.lock.yml
+++ b/.github/workflows/deep-report.lock.yml
@@ -179,7 +179,7 @@ jobs:
name: Fetch weekly issues data
run: "# Create output directories\nmkdir -p /tmp/gh-aw/weekly-issues-data\nmkdir -p /tmp/gh-aw/cache-memory\n\n# Get today's date for cache identification\nTODAY=$(date '+%Y-%m-%d')\nCACHE_DIR=\"/tmp/gh-aw/cache-memory\"\n\n# Check if cached data exists from today\nif [ -f \"$CACHE_DIR/weekly-issues-${TODAY}.json\" ] && [ -s \"$CACHE_DIR/weekly-issues-${TODAY}.json\" ]; then\n echo \"✓ Found cached weekly issues data from ${TODAY}\"\n cp \"$CACHE_DIR/weekly-issues-${TODAY}.json\" /tmp/gh-aw/weekly-issues-data/issues.json\n \n # Regenerate schema if missing\n if [ ! -f \"$CACHE_DIR/weekly-issues-${TODAY}-schema.json\" ]; then\n /tmp/gh-aw/jqschema.sh < /tmp/gh-aw/weekly-issues-data/issues.json > \"$CACHE_DIR/weekly-issues-${TODAY}-schema.json\"\n fi\n cp \"$CACHE_DIR/weekly-issues-${TODAY}-schema.json\" /tmp/gh-aw/weekly-issues-data/issues-schema.json\n \n echo \"Using cached data from ${TODAY}\"\n echo \"Total issues in cache: $(jq 'length' /tmp/gh-aw/weekly-issues-data/issues.json)\"\nelse\n echo \"⬇ Downloading fresh weekly issues data...\"\n \n # Calculate date 7 days ago (cross-platform: GNU date first, BSD fallback)\n DATE_7_DAYS_AGO=$(date -d '7 days ago' '+%Y-%m-%d' 2>/dev/null || date -v-7d '+%Y-%m-%d')\n \n echo \"Fetching issues created or updated since ${DATE_7_DAYS_AGO}...\"\n \n # Fetch issues from the last 7 days using gh CLI\n # Using --search with updated filter to get recent activity\n gh issue list --repo ${{ github.repository }} \\\n --search \"updated:>=${DATE_7_DAYS_AGO}\" \\\n --state all \\\n --json number,title,author,createdAt,state,url,body,labels,updatedAt,closedAt,milestone,assignees,comments \\\n --limit 500 \\\n > /tmp/gh-aw/weekly-issues-data/issues.json\n\n # Generate schema for reference\n /tmp/gh-aw/jqschema.sh < /tmp/gh-aw/weekly-issues-data/issues.json > /tmp/gh-aw/weekly-issues-data/issues-schema.json\n\n # Store in cache with today's date\n cp /tmp/gh-aw/weekly-issues-data/issues.json 
\"$CACHE_DIR/weekly-issues-${TODAY}.json\"\n cp /tmp/gh-aw/weekly-issues-data/issues-schema.json \"$CACHE_DIR/weekly-issues-${TODAY}-schema.json\"\n\n echo \"✓ Weekly issues data saved to cache: weekly-issues-${TODAY}.json\"\n echo \"Total issues found: $(jq 'length' /tmp/gh-aw/weekly-issues-data/issues.json)\"\nfi\n\n# Always ensure data is available at expected locations for backward compatibility\necho \"Weekly issues data available at: /tmp/gh-aw/weekly-issues-data/issues.json\"\necho \"Schema available at: /tmp/gh-aw/weekly-issues-data/issues-schema.json\""
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
+ uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
with:
cache: true
go-version-file: go.mod
diff --git a/.github/workflows/dev-hawk.lock.yml b/.github/workflows/dev-hawk.lock.yml
index 4d5f09b6fac..acfe30b43ef 100644
--- a/.github/workflows/dev-hawk.lock.yml
+++ b/.github/workflows/dev-hawk.lock.yml
@@ -174,7 +174,7 @@ jobs:
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
+ uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
with:
cache: true
go-version-file: go.mod
diff --git a/.github/workflows/dev.lock.yml b/.github/workflows/dev.lock.yml
index bf87f42dce6..fd1a22f1edf 100644
--- a/.github/workflows/dev.lock.yml
+++ b/.github/workflows/dev.lock.yml
@@ -19,11 +19,12 @@
# gh aw compile
# For more information: https://github.com/githubnext/gh-aw/blob/main/.github/aw/github-agentic-workflows.md
#
-# Test MCP gateway with issue creation in staged mode
+# Test upload-asset with Python graph generation
#
# Resolved workflow manifest:
# Imports:
# - shared/gh.md
+# - shared/python-dataviz.md
name: "Dev"
"on":
@@ -140,6 +141,9 @@ jobs:
concurrency:
group: "gh-aw-copilot-${{ github.workflow }}"
env:
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg"
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs
GH_AW_SAFE_OUTPUTS: /tmp/gh-aw/safeoutputs/outputs.jsonl
GH_AW_SAFE_OUTPUTS_CONFIG_PATH: /tmp/gh-aw/safeoutputs/config.json
@@ -159,6 +163,44 @@ jobs:
mkdir -p /tmp/gh-aw/agent
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
+ - name: Setup Python environment
+ run: "# Create working directory for Python scripts\nmkdir -p /tmp/gh-aw/python\nmkdir -p /tmp/gh-aw/python/data\nmkdir -p /tmp/gh-aw/python/charts\nmkdir -p /tmp/gh-aw/python/artifacts\n\necho \"Python environment setup complete\"\necho \"Working directory: /tmp/gh-aw/python\"\necho \"Data directory: /tmp/gh-aw/python/data\"\necho \"Charts directory: /tmp/gh-aw/python/charts\"\necho \"Artifacts directory: /tmp/gh-aw/python/artifacts\"\n"
+ - name: Install Python scientific libraries
+ run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n"
+ - if: always()
+ name: Upload generated charts
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+ with:
+ if-no-files-found: warn
+ name: data-charts
+ path: /tmp/gh-aw/python/charts/*.png
+ retention-days: 30
+ - if: always()
+ name: Upload source files and data
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+ with:
+ if-no-files-found: warn
+ name: python-source-and-data
+ path: |
+ /tmp/gh-aw/python/*.py
+ /tmp/gh-aw/python/data/*
+ retention-days: 30
+
+ # Cache memory file share configuration from frontmatter processed below
+ - name: Create cache-memory directory
+ run: |
+ mkdir -p /tmp/gh-aw/cache-memory
+ echo "Cache memory directory created at /tmp/gh-aw/cache-memory"
+ echo "This folder provides persistent file storage across workflow runs"
+ echo "LLMs and agentic tools can freely read and write files in this directory"
+ - name: Restore cache memory file share data
+ uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: memory-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+ restore-keys: |
+ memory-${{ github.workflow }}-
+ memory-
- name: Configure Git credentials
env:
REPO_NAME: ${{ github.repository }}
@@ -288,12 +330,12 @@ jobs:
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
cat > /tmp/gh-aw/safeoutputs/config.json << 'EOF'
- {"create_issue":{"max":1},"missing_tool":{"max":0},"noop":{"max":1}}
+ {"create_issue":{"max":1},"missing_tool":{"max":0},"noop":{"max":1},"upload_asset":{"max":5}}
EOF
cat > /tmp/gh-aw/safeoutputs/tools.json << 'EOF'
[
{
- "description": "Create a new GitHub issue for tracking bugs, feature requests, or tasks. Use this for actionable work items that need assignment, labeling, and status tracking. For reports, announcements, or status updates that don't require task tracking, use create_discussion instead. CONSTRAINTS: Maximum 1 issue(s) can be created. Title will be prefixed with \"[Poetry Test] \".",
+ "description": "Create a new GitHub issue for tracking bugs, feature requests, or tasks. Use this for actionable work items that need assignment, labeling, and status tracking. For reports, announcements, or status updates that don't require task tracking, use create_discussion instead. CONSTRAINTS: Maximum 1 issue(s) can be created. Title will be prefixed with \"[Dev Test] \".",
"inputSchema": {
"additionalProperties": false,
"properties": {
@@ -332,6 +374,23 @@ jobs:
},
"name": "create_issue"
},
+ {
+ "description": "Upload a file as a URL-addressable asset that can be referenced in issues, PRs, or comments. The file is stored on an orphaned git branch and returns a permanent URL. Use this for images, diagrams, or other files that need to be embedded in GitHub content. CONSTRAINTS: Maximum 5 asset(s) can be uploaded. Maximum file size: 10240KB. Allowed file extensions: [.png .jpg].",
+ "inputSchema": {
+ "additionalProperties": false,
+ "properties": {
+ "path": {
+ "description": "Absolute file path to upload (e.g., '/tmp/chart.png'). Must be under the workspace or /tmp directory. By default, only image files (.png, .jpg, .jpeg) are allowed; other file types require workflow configuration.",
+ "type": "string"
+ }
+ },
+ "required": [
+ "path"
+ ],
+ "type": "object"
+ },
+ "name": "upload_asset"
+ },
{
"description": "Report that a tool or capability needed to complete the task is not available. Use this when you cannot accomplish what was requested because the required functionality is missing or access is restricted.",
"inputSchema": {
@@ -444,6 +503,15 @@ jobs:
"maxLength": 65000
}
}
+ },
+ "upload_asset": {
+ "defaultMax": 10,
+ "fields": {
+ "path": {
+ "required": true,
+ "type": "string"
+ }
+ }
}
}
EOF
@@ -3173,6 +3241,9 @@ jobs:
env:
GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }}
+ GH_AW_ASSETS_BRANCH: ${{ env.GH_AW_ASSETS_BRANCH }}
+ GH_AW_ASSETS_MAX_SIZE_KB: ${{ env.GH_AW_ASSETS_MAX_SIZE_KB }}
+ GH_AW_ASSETS_ALLOWED_EXTS: ${{ env.GH_AW_ASSETS_ALLOWED_EXTS }}
GH_AW_SAFE_INPUTS_PORT: ${{ steps.safe-inputs-start.outputs.port }}
GH_AW_SAFE_INPUTS_API_KEY: ${{ steps.safe-inputs-start.outputs.api_key }}
GH_AW_GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -3382,7 +3453,7 @@ jobs:
event_name: context.eventName,
staged: false,
network_mode: "defaults",
- allowed_domains: ["api.github.com"],
+ allowed_domains: ["api.github.com","defaults","python"],
firewall_enabled: true,
awf_version: "v0.7.0",
steps: {
@@ -3463,22 +3534,268 @@ jobs:
- # Test MCP Gateway: Read Last Issue and Write Poem in Staged Mode
+ # Python Data Visualization Guide
+
+ Python scientific libraries have been installed and are ready for use. A temporary folder structure has been created at `/tmp/gh-aw/python/` for organizing scripts, data, and outputs.
+
+ ## Installed Libraries
+
+ - **NumPy**: Array processing and numerical operations
+ - **Pandas**: Data manipulation and analysis
+ - **Matplotlib**: Chart generation and plotting
+ - **Seaborn**: Statistical data visualization
+ - **SciPy**: Scientific computing utilities
+
+ ## Directory Structure
+
+ ```
+ /tmp/gh-aw/python/
+ ├── data/ # Store all data files here (CSV, JSON, etc.)
+ ├── charts/ # Generated chart images (PNG)
+ ├── artifacts/ # Additional output files
+ └── *.py # Python scripts
+ ```
+
+ ## Data Separation Requirement
+
+ **CRITICAL**: Data must NEVER be inlined in Python code. Always store data in external files and load using pandas.
+
+ ### ❌ PROHIBITED - Inline Data
+ ```python
+ # DO NOT do this
+ data = [10, 20, 30, 40, 50]
+ labels = ['A', 'B', 'C', 'D', 'E']
+ ```
+
+ ### ✅ REQUIRED - External Data Files
+ ```python
+ # Always load data from external files
+ import pandas as pd
+
+ # Load data from CSV
+ data = pd.read_csv('/tmp/gh-aw/python/data/data.csv')
+
+ # Or from JSON
+ data = pd.read_json('/tmp/gh-aw/python/data/data.json')
+ ```
+
+ ## Chart Generation Best Practices
+
+ ### High-Quality Chart Settings
+
+ ```python
+ import matplotlib.pyplot as plt
+ import seaborn as sns
+
+ # Set style for better aesthetics
+ sns.set_style("whitegrid")
+ sns.set_palette("husl")
+
+ # Create figure with high DPI
+ fig, ax = plt.subplots(figsize=(10, 6), dpi=300)
+
+ # Your plotting code here
+ # ...
+
+ # Save with high quality
+ plt.savefig('/tmp/gh-aw/python/charts/chart.png',
+ dpi=300,
+ bbox_inches='tight',
+ facecolor='white',
+ edgecolor='none')
+ ```
+
+ ### Chart Quality Guidelines
+
+ - **DPI**: Use 300 or higher for publication quality
+ - **Figure Size**: Standard is 10x6 inches (adjustable based on needs)
+ - **Labels**: Always include clear axis labels and titles
+ - **Legend**: Add legends when plotting multiple series
+ - **Grid**: Enable grid lines for easier reading
+ - **Colors**: Use colorblind-friendly palettes (seaborn defaults are good)
- Read the most recent issue from the repository and write a creative poem about it in a new issue using **staged mode** (preview mode).
+ ## Including Images in Reports
+
+ When creating reports (issues, discussions, etc.), use the `upload_asset` tool to make images URL-addressable and include them in markdown:
+
+ ### Step 1: Generate and Upload Chart
+ ```python
+ # Generate your chart
+ plt.savefig('/tmp/gh-aw/python/charts/my_chart.png', dpi=300, bbox_inches='tight')
+ ```
+
+ ### Step 2: Upload as Asset
+ Use the `upload_asset` tool to upload the chart file. The tool will return a GitHub raw content URL.
+
+ ### Step 3: Include in Markdown Report
+ When creating your discussion or issue, include the image using markdown:
+
+ ```markdown
+ ## Visualization Results
+
+ 
+
+ The chart above shows...
+ ```
+
+ **Important**: Assets are published to an orphaned git branch and become URL-addressable after workflow completion.
+
+ ## Cache Memory Integration
+
+ The cache memory at `/tmp/gh-aw/cache-memory/` is available for storing reusable code:
+
+ **Helper Functions to Cache:**
+ - Data loading utilities: `data_loader.py`
+ - Chart styling functions: `chart_utils.py`
+ - Common data transformations: `transforms.py`
+
+ **Check Cache Before Creating:**
+ ```bash
+ # Check if helper exists in cache
+ if [ -f /tmp/gh-aw/cache-memory/data_loader.py ]; then
+ cp /tmp/gh-aw/cache-memory/data_loader.py /tmp/gh-aw/python/
+ echo "Using cached data_loader.py"
+ fi
+ ```
+
+ **Save to Cache for Future Runs:**
+ ```bash
+ # Save useful helpers to cache
+ cp /tmp/gh-aw/python/data_loader.py /tmp/gh-aw/cache-memory/
+ echo "Saved data_loader.py to cache for future runs"
+ ```
+
+ ## Complete Example Workflow
+
+ ```python
+ #!/usr/bin/env python3
+ """
+ Example data visualization script
+ Generates a bar chart from external data
+ """
+ import pandas as pd
+ import matplotlib.pyplot as plt
+ import seaborn as sns
+
+ # Set style
+ sns.set_style("whitegrid")
+ sns.set_palette("husl")
+
+ # Load data from external file (NEVER inline)
+ data = pd.read_csv('/tmp/gh-aw/python/data/data.csv')
+
+ # Process data
+ summary = data.groupby('category')['value'].sum()
+
+ # Create chart
+ fig, ax = plt.subplots(figsize=(10, 6), dpi=300)
+ summary.plot(kind='bar', ax=ax)
+
+ # Customize
+ ax.set_title('Data Summary by Category', fontsize=16, fontweight='bold')
+ ax.set_xlabel('Category', fontsize=12)
+ ax.set_ylabel('Value', fontsize=12)
+ ax.grid(True, alpha=0.3)
+
+ # Save chart
+ plt.savefig('/tmp/gh-aw/python/charts/chart.png',
+ dpi=300,
+ bbox_inches='tight',
+ facecolor='white')
+
+ print("Chart saved to /tmp/gh-aw/python/charts/chart.png")
+ ```
+
+ ## Error Handling
+
+ **Check File Existence:**
+ ```python
+ import os
+
+ data_file = '/tmp/gh-aw/python/data/data.csv'
+ if not os.path.exists(data_file):
+ raise FileNotFoundError(f"Data file not found: {data_file}")
+ ```
+
+ **Validate Data:**
+ ```python
+ # Check for required columns
+ required_cols = ['category', 'value']
+ missing = set(required_cols) - set(data.columns)
+ if missing:
+ raise ValueError(f"Missing columns: {missing}")
+ ```
+
+ ## Artifact Upload
+
+ Charts and source files are automatically uploaded as artifacts:
+
+ **Charts Artifact:**
+ - Name: `data-charts`
+ - Contents: PNG files from `/tmp/gh-aw/python/charts/`
+ - Retention: 30 days
+
+ **Source and Data Artifact:**
+ - Name: `python-source-and-data`
+ - Contents: Python scripts and data files
+ - Retention: 30 days
+
+ Both artifacts are uploaded with `if: always()` condition, ensuring they're available even if the workflow fails.
+
+ ## Tips for Success
+
+ 1. **Always Separate Data**: Store data in files, never inline in code
+ 2. **Use Cache Memory**: Store reusable helpers for faster execution
+ 3. **High Quality Charts**: Use DPI 300+ and proper sizing
+ 4. **Clear Documentation**: Add docstrings and comments
+ 5. **Error Handling**: Validate data and check file existence
+ 6. **Type Hints**: Use type annotations for better code quality
+ 7. **Seaborn Defaults**: Leverage seaborn for better aesthetics
+ 8. **Reproducibility**: Set random seeds when needed
+
+ ## Common Data Sources
+
+ Based on common use cases:
+
+ **Repository Statistics:**
+ ```python
+ # Collect via GitHub API, save to data.csv
+ # Then load and visualize
+ data = pd.read_csv('/tmp/gh-aw/python/data/repo_stats.csv')
+ ```
+
+ **Workflow Metrics:**
+ ```python
+ # Collect via GitHub Actions API, save to data.json
+ data = pd.read_json('/tmp/gh-aw/python/data/workflow_metrics.json')
+ ```
+
+ **Sample Data Generation:**
+ ```python
+ # Generate with NumPy, save to file first
+ import numpy as np
+ data = np.random.randn(100, 2)
+ df = pd.DataFrame(data, columns=['x', 'y'])
+ df.to_csv('/tmp/gh-aw/python/data/sample_data.csv', index=False)
+
+ # Then load it back (demonstrating the pattern)
+ data = pd.read_csv('/tmp/gh-aw/python/data/sample_data.csv')
+ ```
+
+ # Test Upload Asset with Python Graph Generation
+
+ Create a dummy graph using Python and matplotlib, then upload it as an asset.
**Requirements:**
- 1. Use the GitHub tools to fetch the most recent issue from this repository
- 2. Read the issue title and body to understand what it's about
- 3. Write a short, creative poem (4-6 lines) inspired by the content of that issue
- 4. Create a new issue with:
- - Title: Start with the prefix "[Poetry Test]" followed by a creative title that relates to the original issue
- - Body: Your poem about the issue, plus a reference to the original issue number
- 5. **IMPORTANT**: Use staged mode (add `staged: true` to your create-issue call) so the issue is previewed with the 🎭 indicator but not actually created
- 6. Confirm that:
- - You successfully read the last issue
- - You created a poem inspired by it
- - The new issue was created in staged mode with the 🎭 indicator
+ 1. Use Python to create a simple graph (e.g., a sine wave or bar chart) using matplotlib
+ 2. Save the graph as a PNG file to /tmp/graph.png
+ 3. Use the `upload_asset` tool to upload the graph
+ 4. The tool should return a URL where the graph can be accessed
+ 5. Create an issue that includes the graph using markdown image syntax
+ 6. Verify that:
+ - The graph file was created successfully
+ - The asset was uploaded and a URL was returned
+ - The issue was created with the embedded graph image
PROMPT_EOF
- name: Append XPIA security instructions to prompt
@@ -3513,6 +3830,31 @@ jobs:
When you need to create temporary files or directories during your work, always use the /tmp/gh-aw/agent/ directory that has been pre-created for you. Do NOT use the root /tmp/ directory directly.
+ PROMPT_EOF
+ - name: Append cache memory instructions to prompt
+ env:
+ GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
+ run: |
+ cat << 'PROMPT_EOF' >> "$GH_AW_PROMPT"
+
+ ---
+
+ ## Cache Folder Available
+
+ You have access to a persistent cache folder at `/tmp/gh-aw/cache-memory/` where you can read and write files to create memories and store information.
+
+ - **Read/Write Access**: You can freely read from and write to any files in this folder
+ - **Persistence**: Files in this folder persist across workflow runs via GitHub Actions cache
+ - **Last Write Wins**: If multiple processes write to the same file, the last write will be preserved
+ - **File Share**: Use this as a simple file share - organize files as you see fit
+
+ Examples of what you can store:
+ - `/tmp/gh-aw/cache-memory/notes.txt` - general notes and observations
+ - `/tmp/gh-aw/cache-memory/preferences.json` - user preferences and settings
+ - `/tmp/gh-aw/cache-memory/history.log` - activity history and logs
+ - `/tmp/gh-aw/cache-memory/state/` - organized state files in subdirectories
+
+ Feel free to create, read, update, and organize files in this folder as needed for your tasks.
PROMPT_EOF
- name: Append safe outputs instructions to prompt
env:
@@ -3527,7 +3869,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_issue, missing_tool, noop
+ **Available tools**: create_issue, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -3813,18 +4155,18 @@ jobs:
- name: Execute GitHub Copilot CLI
id: agentic_execution
# Copilot CLI tool arguments (sorted):
- # --allow-tool github
- # --allow-tool safeinputs
- # --allow-tool safeoutputs
timeout-minutes: 5
run: |
set -o pipefail
sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --mount /tmp:/tmp:rw --mount "${GITHUB_WORKSPACE}:${GITHUB_WORKSPACE}:rw" --mount /usr/bin/date:/usr/bin/date:ro --mount /usr/bin/gh:/usr/bin/gh:ro --mount /usr/bin/yq:/usr/bin/yq:ro --mount /usr/local/bin/copilot:/usr/local/bin/copilot:ro --allow-domains api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --image-tag 0.7.0 \
- -- /usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --allow-tool github --allow-tool safeinputs --allow-tool safeoutputs --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_COPILOT:+ --model "$GH_AW_MODEL_AGENT_COPILOT"} \
+ -- /usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --allow-all-tools --add-dir /tmp/gh-aw/cache-memory/ --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_COPILOT:+ --model "$GH_AW_MODEL_AGENT_COPILOT"} \
2>&1 | tee /tmp/gh-aw/agent-stdio.log
env:
COPILOT_AGENT_RUNNER_TYPE: STANDALONE
COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }}
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg"
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GH_AW_MCP_CONFIG: /home/runner/.copilot/mcp-config.json
GH_AW_MODEL_AGENT_COPILOT: ${{ vars.GH_AW_MODEL_AGENT_COPILOT || '' }}
@@ -6865,6 +7207,19 @@ jobs:
name: agent-stdio.log
path: /tmp/gh-aw/agent-stdio.log
if-no-files-found: warn
+ - name: Upload cache-memory data as artifact
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+ if: always()
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Upload safe outputs assets
+ if: always()
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ if-no-files-found: ignore
- name: Validate agent logs for errors
if: always()
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
@@ -7108,6 +7463,8 @@ jobs:
- agent
- detection
- safe_outputs
+ - update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7645,7 +8002,7 @@ jobs:
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
WORKFLOW_NAME: "Dev"
- WORKFLOW_DESCRIPTION: "Test MCP gateway with issue creation in staged mode"
+ WORKFLOW_DESCRIPTION: "Test upload-asset with Python graph generation"
with:
script: |
const fs = require('fs');
@@ -8540,7 +8897,7 @@ jobs:
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
- GH_AW_ISSUE_TITLE_PREFIX: "[Poetry Test] "
+ GH_AW_ISSUE_TITLE_PREFIX: "[Dev Test] "
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
@@ -8834,3 +9191,247 @@ jobs:
await main();
})();
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: memory-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+ published_count: ${{ steps.upload_assets.outputs.upload_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
+ id: upload_assets
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ env:
+ GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg"
+ GH_AW_WORKFLOW_NAME: "Dev"
+ GH_AW_ENGINE_ID: "copilot"
+ with:
+ github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
+ script: |
+ const fs = require("fs");
+ const path = require("path");
+ const crypto = require("crypto");
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
+ function normalizeBranchName(branchName) {
+ if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
+ return branchName;
+ }
+ let normalized = branchName.replace(/[^a-zA-Z0-9\-_/.]+/g, "-");
+ normalized = normalized.replace(/-+/g, "-");
+ normalized = normalized.replace(/^-+|-+$/g, "");
+ if (normalized.length > 128) {
+ normalized = normalized.substring(0, 128);
+ }
+ normalized = normalized.replace(/-+$/, "");
+ normalized = normalized.toLowerCase();
+ return normalized;
+ }
+ async function main() {
+ const isStaged = process.env.GH_AW_SAFE_OUTPUTS_STAGED === "true";
+ const branchName = process.env.GH_AW_ASSETS_BRANCH;
+ if (!branchName || typeof branchName !== "string") {
+ core.setFailed("GH_AW_ASSETS_BRANCH environment variable is required but not set");
+ return;
+ }
+ const normalizedBranchName = normalizeBranchName(branchName);
+ core.info(`Using assets branch: ${normalizedBranchName}`);
+ const result = loadAgentOutput();
+ if (!result.success) {
+ core.setOutput("upload_count", "0");
+ core.setOutput("branch_name", normalizedBranchName);
+ return;
+ }
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ if (uploadItems.length === 0) {
+ core.info("No upload-asset items found in agent output");
+ core.setOutput("upload_count", "0");
+ core.setOutput("branch_name", normalizedBranchName);
+ return;
+ }
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
+ let uploadCount = 0;
+ let hasChanges = false;
+ try {
+ try {
+ await exec.exec(`git rev-parse --verify origin/${normalizedBranchName}`);
+ await exec.exec(`git checkout -B ${normalizedBranchName} origin/${normalizedBranchName}`);
+ core.info(`Checked out existing branch from origin: ${normalizedBranchName}`);
+ } catch (originError) {
+ if (!normalizedBranchName.startsWith("assets/")) {
+ core.setFailed(
+ `Branch '${normalizedBranchName}' does not start with the required 'assets/' prefix. ` +
+ `Orphaned branches can only be automatically created under the 'assets/' prefix. ` +
+ `Please create the branch manually first, or use a branch name starting with 'assets/'.`
+ );
+ return;
+ }
+ core.info(`Creating new orphaned branch: ${normalizedBranchName}`);
+ await exec.exec(`git checkout --orphan ${normalizedBranchName}`);
+ await exec.exec(`git rm -rf .`);
+ await exec.exec(`git clean -fdx`);
+ }
+ for (const asset of uploadItems) {
+ try {
+ const { fileName, sha, size, targetFileName } = asset;
+ if (!fileName || !sha || !targetFileName) {
+ core.error(`Invalid asset entry missing required fields: ${JSON.stringify(asset)}`);
+ continue;
+ }
+ const assetSourcePath = path.join("/tmp/gh-aw/safeoutputs/assets", fileName);
+ if (!fs.existsSync(assetSourcePath)) {
+ core.warning(`Asset file not found: ${assetSourcePath}`);
+ continue;
+ }
+ const fileContent = fs.readFileSync(assetSourcePath);
+ const computedSha = crypto.createHash("sha256").update(fileContent).digest("hex");
+ if (computedSha !== sha) {
+ core.warning(`SHA mismatch for ${fileName}: expected ${sha}, got ${computedSha}`);
+ continue;
+ }
+ if (fs.existsSync(targetFileName)) {
+ core.info(`Asset ${targetFileName} already exists, skipping`);
+ continue;
+ }
+ fs.copyFileSync(assetSourcePath, targetFileName);
+ await exec.exec(`git add "${targetFileName}"`);
+ uploadCount++;
+ hasChanges = true;
+ core.info(`Added asset: ${targetFileName} (${size} bytes)`);
+ } catch (error) {
+ core.warning(`Failed to process asset ${asset.fileName}: ${error instanceof Error ? error.message : String(error)}`);
+ }
+ }
+ if (hasChanges) {
+ const commitMessage = `[skip-ci] Add ${uploadCount} asset(s)`;
+ await exec.exec(`git`, [`commit`, `-m`, commitMessage]);
+ if (isStaged) {
+ core.summary.addRaw("## Staged Asset Publication\n\n");
+ } else {
+ await exec.exec(`git push origin ${normalizedBranchName}`);
+ core.summary.addRaw("## Assets\n\n").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\`\n\n`);
+ core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
+ }
+ for (const asset of uploadItems) {
+ if (asset.fileName && asset.sha && asset.size !== undefined && asset.url) {
+ core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)\n`);
+ }
+ }
+ await core.summary.write();
+ } else {
+ core.info("No new assets to upload");
+ }
+ } catch (error) {
+ core.setFailed(`Failed to upload assets: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+ core.setOutput("upload_count", uploadCount.toString());
+ core.setOutput("branch_name", normalizedBranchName);
+ }
+ await main();
+
diff --git a/.github/workflows/dev.md b/.github/workflows/dev.md
index bd673f95997..b5efd5d9f0d 100644
--- a/.github/workflows/dev.md
+++ b/.github/workflows/dev.md
@@ -2,7 +2,7 @@
on:
workflow_dispatch:
name: Dev
-description: Test MCP gateway with issue creation in staged mode
+description: Test upload-asset with Python graph generation
timeout-minutes: 5
strict: true
engine: copilot
@@ -18,27 +18,31 @@ sandbox:
tools:
github:
toolsets: [issues]
+
safe-outputs:
+ upload-asset:
+ allowed-exts: [".png", ".jpg"]
+ max: 5
create-issue:
- title-prefix: "[Poetry Test] "
+ title-prefix: "[Dev Test] "
max: 1
+
imports:
- shared/gh.md
+ - shared/python-dataviz.md
---
-# Test MCP Gateway: Read Last Issue and Write Poem in Staged Mode
+# Test Upload Asset with Python Graph Generation
-Read the most recent issue from the repository and write a creative poem about it in a new issue using **staged mode** (preview mode).
+Create a dummy graph using Python and matplotlib, then upload it as an asset.
**Requirements:**
-1. Use the GitHub tools to fetch the most recent issue from this repository
-2. Read the issue title and body to understand what it's about
-3. Write a short, creative poem (4-6 lines) inspired by the content of that issue
-4. Create a new issue with:
- - Title: Start with the prefix "[Poetry Test]" followed by a creative title that relates to the original issue
- - Body: Your poem about the issue, plus a reference to the original issue number
-5. **IMPORTANT**: Use staged mode (add `staged: true` to your create-issue call) so the issue is previewed with the 🎭 indicator but not actually created
-6. Confirm that:
- - You successfully read the last issue
- - You created a poem inspired by it
- - The new issue was created in staged mode with the 🎭 indicator
+1. Use Python to create a simple graph (e.g., a sine wave or bar chart) using matplotlib
+2. Save the graph as a PNG file to /tmp/graph.png
+3. Use the `upload_asset` tool to upload the graph
+4. The tool should return a URL where the graph can be accessed
+5. Create an issue that includes the graph using markdown image syntax
+6. Verify that:
+ - The graph file was created successfully
+ - The asset was uploaded and a URL was returned
+ - The issue was created with the embedded graph image
diff --git a/.github/workflows/firewall-escape.lock.yml b/.github/workflows/firewall-escape.lock.yml
index 85fe48f5989..83c3e846026 100644
--- a/.github/workflows/firewall-escape.lock.yml
+++ b/.github/workflows/firewall-escape.lock.yml
@@ -2978,7 +2978,7 @@ jobs:
steps:
- name: Create issue on test failure
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
with:
script: |
await github.rest.issues.create({
diff --git a/.github/workflows/github-mcp-structural-analysis.lock.yml b/.github/workflows/github-mcp-structural-analysis.lock.yml
index 43c53420980..30ae838bb2f 100644
--- a/.github/workflows/github-mcp-structural-analysis.lock.yml
+++ b/.github/workflows/github-mcp-structural-analysis.lock.yml
@@ -175,7 +175,7 @@ jobs:
run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n"
- if: always()
name: Upload generated charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: data-charts
@@ -183,7 +183,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source files and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: python-source-and-data
diff --git a/.github/workflows/go-pattern-detector.lock.yml b/.github/workflows/go-pattern-detector.lock.yml
index ceb89fffa31..cac892c4463 100644
--- a/.github/workflows/go-pattern-detector.lock.yml
+++ b/.github/workflows/go-pattern-detector.lock.yml
@@ -5384,7 +5384,7 @@ jobs:
found_patterns: ${{ steps.detect.outputs.found_patterns }}
steps:
- name: Checkout repository
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
with:
persist-credentials: false
- name: Install ast-grep
diff --git a/.github/workflows/intelligence.lock.yml b/.github/workflows/intelligence.lock.yml
index 4dbba08e2eb..55904c9d354 100644
--- a/.github/workflows/intelligence.lock.yml
+++ b/.github/workflows/intelligence.lock.yml
@@ -182,7 +182,7 @@ jobs:
run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n"
- if: always()
name: Upload generated charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: data-charts
@@ -190,7 +190,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source files and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: python-source-and-data
diff --git a/.github/workflows/issue-monster.lock.yml b/.github/workflows/issue-monster.lock.yml
index 5183490d4ff..727a77b836b 100644
--- a/.github/workflows/issue-monster.lock.yml
+++ b/.github/workflows/issue-monster.lock.yml
@@ -8453,7 +8453,7 @@ jobs:
steps:
- name: Search for candidate issues
id: search
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
with:
script: |
const { owner, repo } = context.repo;
diff --git a/.github/workflows/mcp-inspector.lock.yml b/.github/workflows/mcp-inspector.lock.yml
index aa22c34ae09..ed6dc31f959 100644
--- a/.github/workflows/mcp-inspector.lock.yml
+++ b/.github/workflows/mcp-inspector.lock.yml
@@ -6948,7 +6948,7 @@ jobs:
find "/tmp/gh-aw/safe-jobs/" -type f -print
echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safe-jobs/agent_output.json" >> "$GITHUB_ENV"
- name: Add comment to Notion page
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
NOTION_API_TOKEN: ${{ secrets.NOTION_API_TOKEN }}
NOTION_PAGE_ID: ${{ vars.NOTION_PAGE_ID }}
@@ -7078,7 +7078,7 @@ jobs:
find "/tmp/gh-aw/safe-jobs/" -type f -print
echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safe-jobs/agent_output.json" >> "$GITHUB_ENV"
- name: Post message to Slack
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
SLACK_CHANNEL_ID: ${{ env.GH_AW_SLACK_CHANNEL_ID }}
diff --git a/.github/workflows/notion-issue-summary.lock.yml b/.github/workflows/notion-issue-summary.lock.yml
index 25267cf121d..3f32f259173 100644
--- a/.github/workflows/notion-issue-summary.lock.yml
+++ b/.github/workflows/notion-issue-summary.lock.yml
@@ -6344,7 +6344,7 @@ jobs:
find "/tmp/gh-aw/safe-jobs/" -type f -print
echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safe-jobs/agent_output.json" >> "$GITHUB_ENV"
- name: Add comment to Notion page
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
NOTION_API_TOKEN: ${{ secrets.NOTION_API_TOKEN }}
NOTION_PAGE_ID: ${{ vars.NOTION_PAGE_ID }}
diff --git a/.github/workflows/org-health-report.lock.yml b/.github/workflows/org-health-report.lock.yml
index 62e081383fd..60b585324b8 100644
--- a/.github/workflows/org-health-report.lock.yml
+++ b/.github/workflows/org-health-report.lock.yml
@@ -176,7 +176,7 @@ jobs:
run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n"
- if: always()
name: Upload generated charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: data-charts
@@ -184,7 +184,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source files and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: python-source-and-data
diff --git a/.github/workflows/portfolio-analyst.lock.yml b/.github/workflows/portfolio-analyst.lock.yml
index 9487d05aacc..94c90a48f83 100644
--- a/.github/workflows/portfolio-analyst.lock.yml
+++ b/.github/workflows/portfolio-analyst.lock.yml
@@ -171,7 +171,7 @@ jobs:
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
+ uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
with:
cache: true
go-version-file: go.mod
@@ -193,7 +193,7 @@ jobs:
pip install --user --quiet numpy pandas matplotlib seaborn scipy
- if: always()
name: Upload charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: trending-charts
@@ -201,7 +201,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: trending-source-and-data
diff --git a/.github/workflows/prompt-clustering-analysis.lock.yml b/.github/workflows/prompt-clustering-analysis.lock.yml
index 418d52a1406..51160fca69d 100644
--- a/.github/workflows/prompt-clustering-analysis.lock.yml
+++ b/.github/workflows/prompt-clustering-analysis.lock.yml
@@ -173,7 +173,7 @@ jobs:
- name: Set up jq utilities directory
run: "mkdir -p /tmp/gh-aw\ncat > /tmp/gh-aw/jqschema.sh << 'EOF'\n#!/usr/bin/env bash\n# jqschema.sh\njq -c '\ndef walk(f):\n . as $in |\n if type == \"object\" then\n reduce keys[] as $k ({}; . + {($k): ($in[$k] | walk(f))})\n elif type == \"array\" then\n if length == 0 then [] else [.[0] | walk(f)] end\n else\n type\n end;\nwalk(.)\n'\nEOF\nchmod +x /tmp/gh-aw/jqschema.sh"
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
+ uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
with:
cache: true
go-version-file: go.mod
@@ -198,7 +198,7 @@ jobs:
pip install --user --quiet numpy pandas matplotlib seaborn scipy
- if: always()
name: Upload charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: trending-charts
@@ -206,7 +206,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: trending-source-and-data
diff --git a/.github/workflows/python-data-charts.lock.yml b/.github/workflows/python-data-charts.lock.yml
index 306f7d397d2..cf486d2814f 100644
--- a/.github/workflows/python-data-charts.lock.yml
+++ b/.github/workflows/python-data-charts.lock.yml
@@ -172,7 +172,7 @@ jobs:
run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n"
- if: always()
name: Upload generated charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: data-charts
@@ -180,7 +180,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source files and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: python-source-and-data
diff --git a/.github/workflows/release.lock.yml b/.github/workflows/release.lock.yml
index 77821c8ff36..e854eb4eeac 100644
--- a/.github/workflows/release.lock.yml
+++ b/.github/workflows/release.lock.yml
@@ -6493,9 +6493,9 @@ jobs:
steps:
- name: Checkout repository
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
+ uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
with:
cache: false
go-version-file: go.mod
@@ -6514,7 +6514,7 @@ jobs:
format: cyclonedx-json
output-file: sbom.cdx.json
- name: Upload SBOM artifacts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
name: sbom-artifacts
path: |
@@ -6693,7 +6693,7 @@ jobs:
release_tag: ${{ steps.get_release.outputs.release_tag }}
steps:
- name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
with:
fetch-depth: 0
persist-credentials: false
diff --git a/.github/workflows/safe-output-health.lock.yml b/.github/workflows/safe-output-health.lock.yml
index 7524b78a10c..ec5fc66f80d 100644
--- a/.github/workflows/safe-output-health.lock.yml
+++ b/.github/workflows/safe-output-health.lock.yml
@@ -167,7 +167,7 @@ jobs:
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
+ uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
with:
cache: true
go-version-file: go.mod
diff --git a/.github/workflows/slide-deck-maintainer.lock.yml b/.github/workflows/slide-deck-maintainer.lock.yml
index 90ff91eaff8..0a16d1df50b 100644
--- a/.github/workflows/slide-deck-maintainer.lock.yml
+++ b/.github/workflows/slide-deck-maintainer.lock.yml
@@ -167,7 +167,7 @@ jobs:
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
- name: Set up Node.js
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
+ uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
cache: npm
cache-dependency-path: docs/package-lock.json
diff --git a/.github/workflows/smoke-copilot-playwright.lock.yml b/.github/workflows/smoke-copilot-playwright.lock.yml
index 3753ea24ea1..86727061a12 100644
--- a/.github/workflows/smoke-copilot-playwright.lock.yml
+++ b/.github/workflows/smoke-copilot-playwright.lock.yml
@@ -7628,7 +7628,7 @@ jobs:
run: "echo \"📋 Collecting Playwright MCP logs...\"\n\n# Create logs directory\nmkdir -p /tmp/gh-aw/playwright-debug-logs\n\n# Copy any playwright logs from the MCP logs directory\nif [ -d \"/tmp/gh-aw/mcp-logs/playwright\" ]; then\n echo \"Found Playwright MCP logs directory\"\n cp -r /tmp/gh-aw/mcp-logs/playwright/* /tmp/gh-aw/playwright-debug-logs/ 2>/dev/null || true\n ls -la /tmp/gh-aw/playwright-debug-logs/\nelse\n echo \"No Playwright MCP logs directory found at /tmp/gh-aw/mcp-logs/playwright\"\nfi\n\n# List all trace files if any\necho \"Looking for trace files...\"\nfind /tmp -name \"*.zip\" -o -name \"trace*\" 2>/dev/null | head -20 || true\n\n# Show docker container logs if any containers are still running\necho \"Checking for running Docker containers...\"\ndocker ps -a --format \"table {{.Names}}\\t{{.Status}}\\t{{.Image}}\" 2>/dev/null || true\n"
- if: always()
name: Upload Playwright Debug Logs
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: ignore
name: playwright-debug-logs-${{ github.run_id }}
diff --git a/.github/workflows/smoke-detector.lock.yml b/.github/workflows/smoke-detector.lock.yml
index 074c6beb81b..6ddb3c587fd 100644
--- a/.github/workflows/smoke-detector.lock.yml
+++ b/.github/workflows/smoke-detector.lock.yml
@@ -595,7 +595,7 @@ jobs:
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
+ uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
with:
cache: true
go-version-file: go.mod
diff --git a/.github/workflows/stale-repo-identifier.lock.yml b/.github/workflows/stale-repo-identifier.lock.yml
index 1a984487991..3de2b7f9baa 100644
--- a/.github/workflows/stale-repo-identifier.lock.yml
+++ b/.github/workflows/stale-repo-identifier.lock.yml
@@ -184,7 +184,7 @@ jobs:
run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n"
- if: always()
name: Upload generated charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: data-charts
@@ -192,7 +192,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source files and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: python-source-and-data
@@ -208,7 +208,7 @@ jobs:
pip install --user --quiet numpy pandas matplotlib seaborn scipy
- if: always()
name: Upload charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: trending-charts
@@ -216,7 +216,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: trending-source-and-data
diff --git a/.github/workflows/static-analysis-report.lock.yml b/.github/workflows/static-analysis-report.lock.yml
index 3b0575cce57..d535c09e308 100644
--- a/.github/workflows/static-analysis-report.lock.yml
+++ b/.github/workflows/static-analysis-report.lock.yml
@@ -166,7 +166,7 @@ jobs:
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
+ uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
with:
cache: true
go-version-file: go.mod
diff --git a/.github/workflows/super-linter.lock.yml b/.github/workflows/super-linter.lock.yml
index 0dd91e497f4..b8ad8e7f5f3 100644
--- a/.github/workflows/super-linter.lock.yml
+++ b/.github/workflows/super-linter.lock.yml
@@ -166,7 +166,7 @@ jobs:
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
- name: Download super-linter log
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
with:
name: super-linter-log
path: /tmp/gh-aw/
@@ -7540,7 +7540,7 @@ jobs:
steps:
- name: Checkout Code
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
with:
fetch-depth: 0
persist-credentials: false
@@ -7568,7 +7568,7 @@ jobs:
fi
- name: Upload super-linter log
if: always()
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
name: super-linter-log
path: super-linter.log
diff --git a/.github/workflows/tidy.lock.yml b/.github/workflows/tidy.lock.yml
index dc569efd4f9..541228a7036 100644
--- a/.github/workflows/tidy.lock.yml
+++ b/.github/workflows/tidy.lock.yml
@@ -577,13 +577,13 @@ jobs:
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
- name: Set up Node.js
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6
+ uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
cache: npm
cache-dependency-path: pkg/workflow/js/package-lock.json
node-version: "24"
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6
+ uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
with:
cache: true
go-version-file: go.mod
diff --git a/.github/workflows/weekly-issue-summary.lock.yml b/.github/workflows/weekly-issue-summary.lock.yml
index c7c7ecf9a69..7f6a36c37f3 100644
--- a/.github/workflows/weekly-issue-summary.lock.yml
+++ b/.github/workflows/weekly-issue-summary.lock.yml
@@ -167,7 +167,7 @@ jobs:
run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n"
- if: always()
name: Upload generated charts
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: data-charts
@@ -175,7 +175,7 @@ jobs:
retention-days: 30
- if: always()
name: Upload source files and data
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
with:
if-no-files-found: warn
name: python-source-and-data
diff --git a/pkg/workflow/compiler_safe_outputs_shared.go b/pkg/workflow/compiler_safe_outputs_shared.go
index 3651364d72f..238e077ca19 100644
--- a/pkg/workflow/compiler_safe_outputs_shared.go
+++ b/pkg/workflow/compiler_safe_outputs_shared.go
@@ -93,23 +93,3 @@ func (c *Compiler) buildHideCommentStepConfig(data *WorkflowData, mainJobName st
Token: cfg.GitHubToken,
}
}
-
-// buildUploadAssetsStepConfig builds the configuration for uploading assets
-func (c *Compiler) buildUploadAssetsStepConfig(data *WorkflowData, mainJobName string, threatDetectionEnabled bool) SafeOutputStepConfig {
- cfg := data.SafeOutputs.UploadAssets
-
- var customEnvVars []string
- customEnvVars = append(customEnvVars, c.buildStepLevelSafeOutputEnvVars(data, "")...)
-
- condition := BuildSafeOutputType("upload_asset")
-
- return SafeOutputStepConfig{
- StepName: "Upload Assets",
- StepID: "upload_assets",
- ScriptName: "upload_assets",
- Script: getUploadAssetsScript(),
- CustomEnvVars: customEnvVars,
- Condition: condition,
- Token: cfg.GitHubToken,
- }
-}
From f5ea3386a1b5d3b78e0189b3e43717c5c1f032f6 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Mon, 22 Dec 2025 20:18:50 +0000
Subject: [PATCH 11/11] Add threat detection dependency to upload_assets job
Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com>
---
.github/workflows/dev.lock.yml | 4 +++-
.github/workflows/poem-bot.lock.yml | 4 +++-
pkg/workflow/branch_normalize_integration_test.go | 2 +-
pkg/workflow/compiler_safe_output_jobs.go | 2 +-
pkg/workflow/publish_assets.go | 13 +++++++++++--
pkg/workflow/publish_assets_test.go | 2 +-
pkg/workflow/safe_outputs_integration_test.go | 2 +-
7 files changed, 21 insertions(+), 8 deletions(-)
diff --git a/.github/workflows/dev.lock.yml b/.github/workflows/dev.lock.yml
index fd1a22f1edf..80ea1d66bfb 100644
--- a/.github/workflows/dev.lock.yml
+++ b/.github/workflows/dev.lock.yml
@@ -9212,7 +9212,9 @@ jobs:
path: /tmp/gh-aw/cache-memory
upload_assets:
- needs: agent
+ needs:
+ - agent
+ - detection
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
runs-on: ubuntu-slim
permissions:
diff --git a/.github/workflows/poem-bot.lock.yml b/.github/workflows/poem-bot.lock.yml
index 720fe9c0374..c29dba71ec6 100644
--- a/.github/workflows/poem-bot.lock.yml
+++ b/.github/workflows/poem-bot.lock.yml
@@ -13737,7 +13737,9 @@ jobs:
path: /tmp/gh-aw/cache-memory
upload_assets:
- needs: agent
+ needs:
+ - agent
+ - detection
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
runs-on: ubuntu-slim
permissions:
diff --git a/pkg/workflow/branch_normalize_integration_test.go b/pkg/workflow/branch_normalize_integration_test.go
index 54e3b51daa8..d49c3168440 100644
--- a/pkg/workflow/branch_normalize_integration_test.go
+++ b/pkg/workflow/branch_normalize_integration_test.go
@@ -105,7 +105,7 @@ func TestUploadAssetsJobHasInlinedNormalization(t *testing.T) {
}
// Build the upload_assets job
- job, err := compiler.buildUploadAssetsJob(data, "agent")
+ job, err := compiler.buildUploadAssetsJob(data, "agent", false)
if err != nil {
t.Fatalf("Failed to build upload_assets job: %v", err)
}
diff --git a/pkg/workflow/compiler_safe_output_jobs.go b/pkg/workflow/compiler_safe_output_jobs.go
index 511ea50f0ea..376707756b5 100644
--- a/pkg/workflow/compiler_safe_output_jobs.go
+++ b/pkg/workflow/compiler_safe_output_jobs.go
@@ -70,7 +70,7 @@ func (c *Compiler) buildSafeOutputsJobs(data *WorkflowData, jobName, markdownPat
// 3. Different permissions (contents: write)
if data.SafeOutputs != nil && data.SafeOutputs.UploadAssets != nil {
compilerSafeOutputJobsLog.Print("Building separate upload_assets job")
- uploadAssetsJob, err := c.buildUploadAssetsJob(data, jobName)
+ uploadAssetsJob, err := c.buildUploadAssetsJob(data, jobName, threatDetectionEnabled)
if err != nil {
return fmt.Errorf("failed to build upload_assets job: %w", err)
}
diff --git a/pkg/workflow/publish_assets.go b/pkg/workflow/publish_assets.go
index e3fd2364de0..da0a13e53d4 100644
--- a/pkg/workflow/publish_assets.go
+++ b/pkg/workflow/publish_assets.go
@@ -4,6 +4,7 @@ import (
"fmt"
"strings"
+ "github.com/githubnext/gh-aw/pkg/constants"
"github.com/githubnext/gh-aw/pkg/logger"
)
@@ -78,8 +79,8 @@ func (c *Compiler) parseUploadAssetConfig(outputMap map[string]any) *UploadAsset
}
// buildUploadAssetsJob creates the publish_assets job
-func (c *Compiler) buildUploadAssetsJob(data *WorkflowData, mainJobName string) (*Job, error) {
- publishAssetsLog.Printf("Building upload_assets job: workflow=%s, main_job=%s", data.Name, mainJobName)
+func (c *Compiler) buildUploadAssetsJob(data *WorkflowData, mainJobName string, threatDetectionEnabled bool) (*Job, error) {
+ publishAssetsLog.Printf("Building upload_assets job: workflow=%s, main_job=%s, threat_detection=%v", data.Name, mainJobName, threatDetectionEnabled)
if data.SafeOutputs == nil || data.SafeOutputs.UploadAssets == nil {
return nil, fmt.Errorf("safe-outputs.upload-asset configuration is required")
@@ -129,6 +130,13 @@ func (c *Compiler) buildUploadAssetsJob(data *WorkflowData, mainJobName string)
// Build the job condition using expression tree
jobCondition := BuildSafeOutputType("upload_asset")
+ // Build job dependencies
+ needs := []string{mainJobName}
+ if threatDetectionEnabled {
+ needs = append(needs, constants.DetectionJobName)
+ publishAssetsLog.Printf("Added detection job dependency for upload_assets")
+ }
+
// Use the shared builder function to create the job
return c.buildSafeOutputJob(data, SafeOutputJobConfig{
JobName: "upload_assets",
@@ -142,5 +150,6 @@ func (c *Compiler) buildUploadAssetsJob(data *WorkflowData, mainJobName string)
Condition: jobCondition,
PreSteps: preSteps,
Token: data.SafeOutputs.UploadAssets.GitHubToken,
+ Needs: needs,
})
}
diff --git a/pkg/workflow/publish_assets_test.go b/pkg/workflow/publish_assets_test.go
index c9f28b0f6f5..88b6a7e6c3b 100644
--- a/pkg/workflow/publish_assets_test.go
+++ b/pkg/workflow/publish_assets_test.go
@@ -126,7 +126,7 @@ func TestUploadAssetsJobUsesFileInput(t *testing.T) {
},
}
- job, err := c.buildUploadAssetsJob(data, "agent")
+ job, err := c.buildUploadAssetsJob(data, "agent", false)
if err != nil {
t.Fatalf("Failed to build upload assets job: %v", err)
}
diff --git a/pkg/workflow/safe_outputs_integration_test.go b/pkg/workflow/safe_outputs_integration_test.go
index b5e41d71369..2fb57d3dafd 100644
--- a/pkg/workflow/safe_outputs_integration_test.go
+++ b/pkg/workflow/safe_outputs_integration_test.go
@@ -181,7 +181,7 @@ func TestSafeOutputJobsIntegration(t *testing.T) {
},
requiredEnvVar: "GH_AW_WORKFLOW_ID",
jobBuilder: func(c *Compiler, data *WorkflowData, mainJobName string) (*Job, error) {
- return c.buildUploadAssetsJob(data, mainJobName)
+ return c.buildUploadAssetsJob(data, mainJobName, false)
},
},
{