diff --git a/.changeset/patch-standardize-upload-asset.md b/.changeset/patch-standardize-upload-asset.md
new file mode 100644
index 00000000000..a051568cdf4
--- /dev/null
+++ b/.changeset/patch-standardize-upload-asset.md
@@ -0,0 +1,9 @@
+---
+"gh-aw": patch
+---
+
+Standardize safe output references to singular "upload-asset" across schemas,
+parsing, and processing logic. Includes a codemod to migrate existing workflows
+and updates to tests and documentation. This is a non-breaking internal
+standardization and tooling change.
+
diff --git a/.github/aw/schemas/agentic-workflow.json b/.github/aw/schemas/agentic-workflow.json
index 56aacaef4a7..71a285db04c 100644
--- a/.github/aw/schemas/agentic-workflow.json
+++ b/.github/aw/schemas/agentic-workflow.json
@@ -4200,7 +4200,7 @@
}
]
},
- "upload-assets": {
+ "upload-asset": {
"oneOf": [
{
"type": "object",
diff --git a/.github/workflows/audit-workflows.lock.yml b/.github/workflows/audit-workflows.lock.yml
index 4ba03324b3a..89d6836261b 100644
--- a/.github/workflows/audit-workflows.lock.yml
+++ b/.github/workflows/audit-workflows.lock.yml
@@ -2384,7 +2384,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_discussion, missing_tool, noop, upload_assets
+ **Available tools**: create_discussion, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -5623,6 +5623,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -6397,13 +6398,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "claude"
GH_AW_TRACKER_ID: "audit-workflows-daily"
GH_AW_WORKFLOW_ID: "audit-workflows"
@@ -7588,24 +7586,135 @@ jobs:
core.info(`Successfully created ${createdDiscussions.length} discussion(s)`);
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: trending-data-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+      published_count: ${{ steps.upload_assets.outputs.upload_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Agentic Workflow Audit Agent"
+ GH_AW_TRACKER_ID: "audit-workflows-daily"
+ GH_AW_ENGINE_ID: "claude"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -7635,16 +7744,14 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
- if (allUploadItems.length === 0) {
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -7666,7 +7773,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -7707,7 +7814,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
@@ -7723,25 +7830,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: trending-data-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/audit-workflows.md b/.github/workflows/audit-workflows.md
index 208d0349fe2..15149cd0629 100644
--- a/.github/workflows/audit-workflows.md
+++ b/.github/workflows/audit-workflows.md
@@ -19,7 +19,7 @@ steps:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: ./gh-aw logs --start-date -1d -o /tmp/gh-aw/aw-mcp/logs
safe-outputs:
- upload-assets:
+ upload-asset:
create-discussion:
category: "audits"
max: 1
@@ -160,4 +160,4 @@ Use gh-aw MCP server (not CLI directly). Run `status` tool to verify.
Cache structure: `/tmp/gh-aw/cache-memory/{audits,patterns,metrics}/*.json`
-Always create discussion with findings and update cache memory.
+Always create discussion with findings and update cache memory.
\ No newline at end of file
diff --git a/.github/workflows/copilot-pr-nlp-analysis.lock.yml b/.github/workflows/copilot-pr-nlp-analysis.lock.yml
index 140bd7d8985..bf4816186cf 100644
--- a/.github/workflows/copilot-pr-nlp-analysis.lock.yml
+++ b/.github/workflows/copilot-pr-nlp-analysis.lock.yml
@@ -2938,7 +2938,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_discussion, missing_tool, noop, upload_assets
+ **Available tools**: create_discussion, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -6526,6 +6526,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7291,13 +7292,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_WORKFLOW_ID: "copilot-pr-nlp-analysis"
GH_AW_WORKFLOW_NAME: "Copilot PR Conversation NLP Analysis"
@@ -8481,24 +8479,134 @@ jobs:
core.info(`Successfully created ${createdDiscussions.length} discussion(s)`);
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: copilot-pr-data-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+      published_count: ${{ steps.upload_assets.outputs.upload_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Copilot PR Conversation NLP Analysis"
+ GH_AW_ENGINE_ID: "copilot"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -8528,16 +8636,14 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
- if (allUploadItems.length === 0) {
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -8559,7 +8665,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8600,7 +8706,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
@@ -8616,25 +8722,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: copilot-pr-data-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/copilot-session-insights.lock.yml b/.github/workflows/copilot-session-insights.lock.yml
index 2b6ba31969a..ace2268b63e 100644
--- a/.github/workflows/copilot-session-insights.lock.yml
+++ b/.github/workflows/copilot-session-insights.lock.yml
@@ -3372,7 +3372,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_discussion, missing_tool, noop, upload_assets
+ **Available tools**: create_discussion, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -6612,6 +6612,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7383,13 +7384,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "claude"
GH_AW_WORKFLOW_ID: "copilot-session-insights"
GH_AW_WORKFLOW_NAME: "Copilot Session Insights"
@@ -8573,24 +8571,134 @@ jobs:
core.info(`Successfully created ${createdDiscussions.length} discussion(s)`);
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: memory-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+      published_count: ${{ steps.upload_assets.outputs.upload_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Copilot Session Insights"
+ GH_AW_ENGINE_ID: "claude"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -8620,16 +8728,14 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
- if (allUploadItems.length === 0) {
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -8651,7 +8757,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8692,7 +8798,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
@@ -8708,25 +8814,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: memory-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/copilot-session-insights.md b/.github/workflows/copilot-session-insights.md
index 098a9006679..3f719e923b4 100644
--- a/.github/workflows/copilot-session-insights.md
+++ b/.github/workflows/copilot-session-insights.md
@@ -23,7 +23,7 @@ network:
- python
safe-outputs:
- upload-assets:
+ upload-asset:
create-discussion:
title-prefix: "[copilot-session-insights] "
category: "audits"
@@ -743,5 +743,4 @@ A successful analysis includes:
---
-Begin your analysis by verifying the downloaded session data, loading historical context from cache memory, and proceeding through the analysis phases systematically.
-
+Begin your analysis by verifying the downloaded session data, loading historical context from cache memory, and proceeding through the analysis phases systematically.
\ No newline at end of file
diff --git a/.github/workflows/daily-copilot-token-report.lock.yml b/.github/workflows/daily-copilot-token-report.lock.yml
index 15b25097765..31fbeab4bdc 100644
--- a/.github/workflows/daily-copilot-token-report.lock.yml
+++ b/.github/workflows/daily-copilot-token-report.lock.yml
@@ -3036,7 +3036,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_discussion, missing_tool, noop, upload_assets
+ **Available tools**: create_discussion, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -6623,6 +6623,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7391,13 +7392,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_TRACKER_ID: "daily-copilot-token-report"
GH_AW_WORKFLOW_ID: "daily-copilot-token-report"
@@ -8582,24 +8580,135 @@ jobs:
core.info(`Successfully created ${createdDiscussions.length} discussion(s)`);
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: memory-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+      published_count: ${{ steps.upload_assets.outputs.upload_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Daily Copilot Token Consumption Report"
+ GH_AW_TRACKER_ID: "daily-copilot-token-report"
+ GH_AW_ENGINE_ID: "copilot"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -8629,16 +8738,14 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
- if (allUploadItems.length === 0) {
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -8660,7 +8767,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8701,7 +8808,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
@@ -8717,25 +8824,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: memory-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/daily-copilot-token-report.md b/.github/workflows/daily-copilot-token-report.md
index 2ca0f360207..afba7c12ae0 100644
--- a/.github/workflows/daily-copilot-token-report.md
+++ b/.github/workflows/daily-copilot-token-report.md
@@ -47,7 +47,7 @@ steps:
exit 1
fi
safe-outputs:
- upload-assets:
+ upload-asset:
create-discussion:
expires: 3d
category: "audits"
@@ -676,4 +676,4 @@ Your output MUST:
7. Store current day's metrics in cache memory for future trend tracking
8. Use the collapsible details format from the reporting.md import
-Begin your analysis now. The logs have been pre-downloaded to `/tmp/gh-aw/copilot-logs.json` - process the data systematically, generate insightful visualizations, and create a comprehensive report that helps optimize Copilot token consumption across all workflows.
+Begin your analysis now. The logs have been pre-downloaded to `/tmp/gh-aw/copilot-logs.json` - process the data systematically, generate insightful visualizations, and create a comprehensive report that helps optimize Copilot token consumption across all workflows.
\ No newline at end of file
diff --git a/.github/workflows/daily-file-diet.lock.yml b/.github/workflows/daily-file-diet.lock.yml
index 210a9f50261..df790e2de14 100644
--- a/.github/workflows/daily-file-diet.lock.yml
+++ b/.github/workflows/daily-file-diet.lock.yml
@@ -2975,7 +2975,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_issue, missing_tool, noop, upload_assets
+ **Available tools**: create_issue, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -6574,6 +6574,7 @@ jobs:
- push_repo_memory
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7761,13 +7762,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
issues: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_TRACKER_ID: "daily-file-diet"
GH_AW_WORKFLOW_ID: "daily-file-diet"
@@ -7797,7 +7795,7 @@ jobs:
owner: ${{ github.repository_owner }}
repositories: ${{ github.event.repository.name }}
github-api-url: ${{ github.api_url }}
- permission-contents: write
+ permission-contents: read
permission-issues: write
- name: Setup JavaScript files
id: setup_scripts
@@ -8737,24 +8735,158 @@ jobs:
(async () => {
await main();
})();
- - name: Upload Assets
+ - name: Invalidate GitHub App token
+ if: always() && steps.app-token.outputs.token != ''
+ env:
+ TOKEN: ${{ steps.app-token.outputs.token }}
+ run: |
+ echo "Revoking GitHub App installation token..."
+ # GitHub CLI will auth with the token being revoked.
+ gh api \
+ --method DELETE \
+ -H "Authorization: token $TOKEN" \
+ /installation/token || echo "Token revoke may already be expired."
+
+ echo "Token invalidation step complete."
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: memory-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+      published_count: ${{ steps.upload_assets.outputs.upload_count }}
+ steps:
+ - name: Generate GitHub App token
+ id: app-token
+ uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2.2.1
+ with:
+ app-id: ${{ vars.APP_ID }}
+ private-key: ${{ secrets.APP_PRIVATE_KEY }}
+ owner: ${{ github.repository_owner }}
+ repositories: ${{ github.event.repository.name }}
+ github-api-url: ${{ github.api_url }}
+ permission-contents: write
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Daily File Diet"
+ GH_AW_TRACKER_ID: "daily-file-diet"
+ GH_AW_ENGINE_ID: "copilot"
with:
github-token: ${{ steps.app-token.outputs.token }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -8784,16 +8916,14 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
- if (allUploadItems.length === 0) {
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -8815,7 +8945,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8856,7 +8986,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
@@ -8872,7 +9002,7 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
+ await main();
- name: Invalidate GitHub App token
if: always() && steps.app-token.outputs.token != ''
env:
@@ -8887,23 +9017,3 @@ jobs:
echo "Token invalidation step complete."
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: memory-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
-
diff --git a/.github/workflows/daily-firewall-report.lock.yml b/.github/workflows/daily-firewall-report.lock.yml
index ab79737b3d4..85760a9a6b6 100644
--- a/.github/workflows/daily-firewall-report.lock.yml
+++ b/.github/workflows/daily-firewall-report.lock.yml
@@ -2506,7 +2506,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_discussion, missing_tool, noop, upload_assets
+ **Available tools**: create_discussion, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -6102,6 +6102,7 @@ jobs:
- push_repo_memory
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7063,13 +7064,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_TRACKER_ID: "daily-firewall-report"
GH_AW_WORKFLOW_ID: "daily-firewall-report"
@@ -8254,24 +8252,135 @@ jobs:
core.info(`Successfully created ${createdDiscussions.length} discussion(s)`);
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: trending-data-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+      published_count: ${{ steps.upload_assets.outputs.upload_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Daily Firewall Logs Collector and Reporter"
+ GH_AW_TRACKER_ID: "daily-firewall-report"
+ GH_AW_ENGINE_ID: "copilot"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -8301,16 +8410,14 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
- if (allUploadItems.length === 0) {
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -8332,7 +8439,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8373,7 +8480,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
@@ -8389,25 +8496,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: trending-data-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/daily-firewall-report.md b/.github/workflows/daily-firewall-report.md
index d1dfc0005d3..2975ae9f5c0 100644
--- a/.github/workflows/daily-firewall-report.md
+++ b/.github/workflows/daily-firewall-report.md
@@ -16,7 +16,7 @@ tracker-id: daily-firewall-report
timeout-minutes: 45
safe-outputs:
- upload-assets:
+ upload-asset:
create-discussion:
expires: 3d
category: "audits"
@@ -330,4 +330,4 @@ Create a new GitHub discussion with:
## Expected Output
-A GitHub discussion in the "audits" category containing a comprehensive daily firewall analysis report.
+A GitHub discussion in the "audits" category containing a comprehensive daily firewall analysis report.
\ No newline at end of file
diff --git a/.github/workflows/daily-issues-report.lock.yml b/.github/workflows/daily-issues-report.lock.yml
index ba2972217e8..5234acf4540 100644
--- a/.github/workflows/daily-issues-report.lock.yml
+++ b/.github/workflows/daily-issues-report.lock.yml
@@ -3119,7 +3119,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: close_discussion, create_discussion, missing_tool, noop, upload_assets
+ **Available tools**: close_discussion, create_discussion, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -6670,6 +6670,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7572,13 +7573,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "codex"
GH_AW_TRACKER_ID: "daily-issues-report"
GH_AW_WORKFLOW_ID: "daily-issues-report"
@@ -9128,24 +9126,135 @@ jobs:
return closedDiscussions;
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: memory-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+ published_count: ${{ steps.upload_assets.outputs.upload_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Daily Issues Report Generator"
+ GH_AW_TRACKER_ID: "daily-issues-report"
+ GH_AW_ENGINE_ID: "codex"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -9175,16 +9284,14 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
- if (allUploadItems.length === 0) {
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -9206,7 +9313,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -9247,7 +9354,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
@@ -9263,25 +9370,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: memory-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/daily-issues-report.md b/.github/workflows/daily-issues-report.md
index c3d430e0254..93b64fbaa97 100644
--- a/.github/workflows/daily-issues-report.md
+++ b/.github/workflows/daily-issues-report.md
@@ -14,7 +14,7 @@ tools:
github:
toolsets: [default, discussions]
safe-outputs:
- upload-assets:
+ upload-asset:
create-discussion:
expires: 3d
category: "General"
@@ -350,4 +350,4 @@ A successful run will:
- ✅ Create a new discussion with comprehensive report
- ✅ Include all required metrics and visualizations
-Begin your analysis now. Load the data, run the Python analysis, generate charts, and create the discussion report.
+Begin your analysis now. Load the data, run the Python analysis, generate charts, and create the discussion report.
\ No newline at end of file
diff --git a/.github/workflows/daily-multi-device-docs-tester.lock.yml b/.github/workflows/daily-multi-device-docs-tester.lock.yml
index dca4d3da817..3f46b06c8e1 100644
--- a/.github/workflows/daily-multi-device-docs-tester.lock.yml
+++ b/.github/workflows/daily-multi-device-docs-tester.lock.yml
@@ -2177,7 +2177,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_issue, missing_tool, noop, upload_assets
+ **Available tools**: create_issue, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -5450,6 +5450,7 @@ jobs:
- agent
- detection
- safe_outputs
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -6225,13 +6226,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
issues: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "claude"
GH_AW_TRACKER_ID: "daily-multi-device-docs-tester"
GH_AW_WORKFLOW_ID: "daily-multi-device-docs-tester"
@@ -7188,24 +7186,115 @@ jobs:
(async () => {
await main();
})();
- - name: Upload Assets
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+ published_count: ${{ steps.upload_assets.outputs.upload_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Multi-Device Docs Tester"
+ GH_AW_TRACKER_ID: "daily-multi-device-docs-tester"
+ GH_AW_ENGINE_ID: "claude"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -7235,16 +7324,14 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
- if (allUploadItems.length === 0) {
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -7266,7 +7353,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -7307,7 +7394,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
@@ -7323,5 +7410,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
+ await main();
diff --git a/.github/workflows/daily-multi-device-docs-tester.md b/.github/workflows/daily-multi-device-docs-tester.md
index 2c9623f7ff3..d04dd206582 100644
--- a/.github/workflows/daily-multi-device-docs-tester.md
+++ b/.github/workflows/daily-multi-device-docs-tester.md
@@ -34,7 +34,7 @@ tools:
- "pwd*" # Print working directory
- "cd*" # Change directory
safe-outputs:
- upload-assets:
+ upload-asset:
create-issue:
network:
@@ -136,4 +136,4 @@ rm -f /tmp/server.pid
## Summary
-Provide: total devices tested, test results (passed/failed/warnings), key findings, and link to issue (if created).
+Provide: total devices tested, test results (passed/failed/warnings), key findings, and link to issue (if created).
\ No newline at end of file
diff --git a/.github/workflows/daily-news.lock.yml b/.github/workflows/daily-news.lock.yml
index 5021ff09e5c..04cc50bb64b 100644
--- a/.github/workflows/daily-news.lock.yml
+++ b/.github/workflows/daily-news.lock.yml
@@ -2829,7 +2829,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_discussion, missing_tool, noop, upload_assets
+ **Available tools**: create_discussion, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -6417,6 +6417,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7185,13 +7186,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_TRACKER_ID: "daily-news-weekday"
GH_AW_WORKFLOW_ID: "daily-news"
@@ -8376,24 +8374,135 @@ jobs:
core.info(`Successfully created ${createdDiscussions.length} discussion(s)`);
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: memory-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+ published_count: ${{ steps.upload_assets.outputs.upload_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Daily News"
+ GH_AW_TRACKER_ID: "daily-news-weekday"
+ GH_AW_ENGINE_ID: "copilot"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -8423,16 +8532,14 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
- if (allUploadItems.length === 0) {
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -8454,7 +8561,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8495,7 +8602,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
@@ -8511,25 +8618,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: memory-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/daily-news.md b/.github/workflows/daily-news.md
index 6be3809631f..bbb589fd812 100644
--- a/.github/workflows/daily-news.md
+++ b/.github/workflows/daily-news.md
@@ -27,7 +27,7 @@ network:
sandbox:
agent: awf # Firewall enabled (migrated from network.firewall)
safe-outputs:
- upload-assets:
+ upload-asset:
create-discussion:
expires: 3d
category: "daily-news"
diff --git a/.github/workflows/daily-performance-summary.lock.yml b/.github/workflows/daily-performance-summary.lock.yml
index 7681eadf9f7..4dfb57dfd70 100644
--- a/.github/workflows/daily-performance-summary.lock.yml
+++ b/.github/workflows/daily-performance-summary.lock.yml
@@ -4331,7 +4331,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: close_discussion, create_discussion, missing_tool, noop, upload_assets
+ **Available tools**: close_discussion, create_discussion, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -7890,6 +7890,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -8643,13 +8644,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "codex"
GH_AW_TRACKER_ID: "daily-performance-summary"
GH_AW_WORKFLOW_ID: "daily-performance-summary"
@@ -10199,24 +10197,135 @@ jobs:
return closedDiscussions;
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: trending-data-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+ published_count: ${{ steps.upload_assets.outputs.upload_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Daily Project Performance Summary Generator (Using Safe Inputs)"
+ GH_AW_TRACKER_ID: "daily-performance-summary"
+ GH_AW_ENGINE_ID: "codex"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -10246,16 +10355,14 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
- if (allUploadItems.length === 0) {
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -10277,7 +10384,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -10318,7 +10425,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
@@ -10334,25 +10441,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: trending-data-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/daily-performance-summary.md b/.github/workflows/daily-performance-summary.md
index 36728bfa6d9..7963a0db970 100644
--- a/.github/workflows/daily-performance-summary.md
+++ b/.github/workflows/daily-performance-summary.md
@@ -16,7 +16,7 @@ tools:
github:
toolsets: [default, discussions]
safe-outputs:
- upload-assets:
+ upload-asset:
create-discussion:
expires: 3d
category: "General"
@@ -477,4 +477,4 @@ This workflow uses safe-input tools imported from `shared/github-queries-safe-in
3. Tools are authenticated with `GITHUB_TOKEN` for GitHub API access
4. Call tools with parameters like: `github-pr-query with state: "all", limit: 1000, jq: "."`
-Begin your analysis now. **Use the safe-input tools** to gather data, run Python analysis, generate charts, and create the discussion report.
+Begin your analysis now. **Use the safe-input tools** to gather data, run Python analysis, generate charts, and create the discussion report.
\ No newline at end of file
diff --git a/.github/workflows/daily-repo-chronicle.lock.yml b/.github/workflows/daily-repo-chronicle.lock.yml
index 7ff0a27ddfe..2c47c0d0213 100644
--- a/.github/workflows/daily-repo-chronicle.lock.yml
+++ b/.github/workflows/daily-repo-chronicle.lock.yml
@@ -2751,7 +2751,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_discussion, missing_tool, noop, upload_assets
+ **Available tools**: create_discussion, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -6338,6 +6338,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7106,13 +7107,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_TRACKER_ID: "daily-repo-chronicle"
GH_AW_WORKFLOW_ID: "daily-repo-chronicle"
@@ -8297,24 +8295,135 @@ jobs:
core.info(`Successfully created ${createdDiscussions.length} discussion(s)`);
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: memory-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+      published_count: ${{ steps.upload_assets.outputs.upload_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "The Daily Repository Chronicle"
+ GH_AW_TRACKER_ID: "daily-repo-chronicle"
+ GH_AW_ENGINE_ID: "copilot"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -8344,16 +8453,14 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
- if (allUploadItems.length === 0) {
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -8375,7 +8482,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8416,7 +8523,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
@@ -8432,25 +8539,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: memory-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/daily-repo-chronicle.md b/.github/workflows/daily-repo-chronicle.md
index feb1219fc78..bcd833becfe 100644
--- a/.github/workflows/daily-repo-chronicle.md
+++ b/.github/workflows/daily-repo-chronicle.md
@@ -30,7 +30,7 @@ tools:
- default
- discussions
safe-outputs:
- upload-assets:
+ upload-asset:
create-discussion:
expires: 3d
title-prefix: "📰 "
diff --git a/.github/workflows/deep-report.lock.yml b/.github/workflows/deep-report.lock.yml
index 75965528401..36ec4546456 100644
--- a/.github/workflows/deep-report.lock.yml
+++ b/.github/workflows/deep-report.lock.yml
@@ -2599,7 +2599,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_discussion, missing_tool, noop, upload_assets
+ **Available tools**: create_discussion, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -6158,6 +6158,7 @@ jobs:
- push_repo_memory
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7105,13 +7106,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "codex"
GH_AW_TRACKER_ID: "deep-report-intel-agent"
GH_AW_WORKFLOW_ID: "deep-report"
@@ -8296,24 +8294,135 @@ jobs:
core.info(`Successfully created ${createdDiscussions.length} discussion(s)`);
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: weekly-issues-data-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+      published_count: ${{ steps.upload_assets.outputs.upload_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "DeepReport - Intelligence Gathering Agent"
+ GH_AW_TRACKER_ID: "deep-report-intel-agent"
+ GH_AW_ENGINE_ID: "codex"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -8343,16 +8452,14 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
- if (allUploadItems.length === 0) {
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -8374,7 +8481,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8415,7 +8522,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
@@ -8431,25 +8538,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: weekly-issues-data-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/deep-report.md b/.github/workflows/deep-report.md
index 3443fe24a24..2ee538db23f 100644
--- a/.github/workflows/deep-report.md
+++ b/.github/workflows/deep-report.md
@@ -26,7 +26,7 @@ network:
- node
safe-outputs:
- upload-assets:
+ upload-asset:
create-discussion:
category: "reports"
max: 1
@@ -325,4 +325,4 @@ List all reports and data sources analyzed:
- Be **objective** - report both positive and negative trends
- **Cite sources** for all major claims
-Create a new GitHub discussion titled "DeepReport Intelligence Briefing - [Today's Date]" in the "reports" category with your analysis.
+Create a new GitHub discussion titled "DeepReport Intelligence Briefing - [Today's Date]" in the "reports" category with your analysis.
\ No newline at end of file
diff --git a/.github/workflows/dev.lock.yml b/.github/workflows/dev.lock.yml
index bf87f42dce6..80ea1d66bfb 100644
--- a/.github/workflows/dev.lock.yml
+++ b/.github/workflows/dev.lock.yml
@@ -19,11 +19,12 @@
# gh aw compile
# For more information: https://github.com/githubnext/gh-aw/blob/main/.github/aw/github-agentic-workflows.md
#
-# Test MCP gateway with issue creation in staged mode
+# Test upload-asset with Python graph generation
#
# Resolved workflow manifest:
# Imports:
# - shared/gh.md
+# - shared/python-dataviz.md
name: "Dev"
"on":
@@ -140,6 +141,9 @@ jobs:
concurrency:
group: "gh-aw-copilot-${{ github.workflow }}"
env:
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg"
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs
GH_AW_SAFE_OUTPUTS: /tmp/gh-aw/safeoutputs/outputs.jsonl
GH_AW_SAFE_OUTPUTS_CONFIG_PATH: /tmp/gh-aw/safeoutputs/config.json
@@ -159,6 +163,44 @@ jobs:
mkdir -p /tmp/gh-aw/agent
mkdir -p /tmp/gh-aw/sandbox/agent/logs
echo "Created /tmp/gh-aw/agent directory for agentic workflow temporary files"
+ - name: Setup Python environment
+ run: "# Create working directory for Python scripts\nmkdir -p /tmp/gh-aw/python\nmkdir -p /tmp/gh-aw/python/data\nmkdir -p /tmp/gh-aw/python/charts\nmkdir -p /tmp/gh-aw/python/artifacts\n\necho \"Python environment setup complete\"\necho \"Working directory: /tmp/gh-aw/python\"\necho \"Data directory: /tmp/gh-aw/python/data\"\necho \"Charts directory: /tmp/gh-aw/python/charts\"\necho \"Artifacts directory: /tmp/gh-aw/python/artifacts\"\n"
+ - name: Install Python scientific libraries
+ run: "pip install --user --quiet numpy pandas matplotlib seaborn scipy\n\n# Verify installations\npython3 -c \"import numpy; print(f'NumPy {numpy.__version__} installed')\"\npython3 -c \"import pandas; print(f'Pandas {pandas.__version__} installed')\"\npython3 -c \"import matplotlib; print(f'Matplotlib {matplotlib.__version__} installed')\"\npython3 -c \"import seaborn; print(f'Seaborn {seaborn.__version__} installed')\"\npython3 -c \"import scipy; print(f'SciPy {scipy.__version__} installed')\"\n\necho \"All scientific libraries installed successfully\"\n"
+ - if: always()
+ name: Upload generated charts
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+ with:
+ if-no-files-found: warn
+ name: data-charts
+ path: /tmp/gh-aw/python/charts/*.png
+ retention-days: 30
+ - if: always()
+ name: Upload source files and data
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+ with:
+ if-no-files-found: warn
+ name: python-source-and-data
+ path: |
+ /tmp/gh-aw/python/*.py
+ /tmp/gh-aw/python/data/*
+ retention-days: 30
+
+ # Cache memory file share configuration from frontmatter processed below
+ - name: Create cache-memory directory
+ run: |
+ mkdir -p /tmp/gh-aw/cache-memory
+ echo "Cache memory directory created at /tmp/gh-aw/cache-memory"
+ echo "This folder provides persistent file storage across workflow runs"
+ echo "LLMs and agentic tools can freely read and write files in this directory"
+ - name: Restore cache memory file share data
+ uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: memory-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+ restore-keys: |
+ memory-${{ github.workflow }}-
+ memory-
- name: Configure Git credentials
env:
REPO_NAME: ${{ github.repository }}
@@ -288,12 +330,12 @@ jobs:
mkdir -p /tmp/gh-aw/safeoutputs
mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs
cat > /tmp/gh-aw/safeoutputs/config.json << 'EOF'
- {"create_issue":{"max":1},"missing_tool":{"max":0},"noop":{"max":1}}
+ {"create_issue":{"max":1},"missing_tool":{"max":0},"noop":{"max":1},"upload_asset":{"max":5}}
EOF
cat > /tmp/gh-aw/safeoutputs/tools.json << 'EOF'
[
{
- "description": "Create a new GitHub issue for tracking bugs, feature requests, or tasks. Use this for actionable work items that need assignment, labeling, and status tracking. For reports, announcements, or status updates that don't require task tracking, use create_discussion instead. CONSTRAINTS: Maximum 1 issue(s) can be created. Title will be prefixed with \"[Poetry Test] \".",
+ "description": "Create a new GitHub issue for tracking bugs, feature requests, or tasks. Use this for actionable work items that need assignment, labeling, and status tracking. For reports, announcements, or status updates that don't require task tracking, use create_discussion instead. CONSTRAINTS: Maximum 1 issue(s) can be created. Title will be prefixed with \"[Dev Test] \".",
"inputSchema": {
"additionalProperties": false,
"properties": {
@@ -332,6 +374,23 @@ jobs:
},
"name": "create_issue"
},
+ {
+ "description": "Upload a file as a URL-addressable asset that can be referenced in issues, PRs, or comments. The file is stored on an orphaned git branch and returns a permanent URL. Use this for images, diagrams, or other files that need to be embedded in GitHub content. CONSTRAINTS: Maximum 5 asset(s) can be uploaded. Maximum file size: 10240KB. Allowed file extensions: [.png .jpg].",
+ "inputSchema": {
+ "additionalProperties": false,
+ "properties": {
+ "path": {
+ "description": "Absolute file path to upload (e.g., '/tmp/chart.png'). Must be under the workspace or /tmp directory. By default, only image files (.png, .jpg, .jpeg) are allowed; other file types require workflow configuration.",
+ "type": "string"
+ }
+ },
+ "required": [
+ "path"
+ ],
+ "type": "object"
+ },
+ "name": "upload_asset"
+ },
{
"description": "Report that a tool or capability needed to complete the task is not available. Use this when you cannot accomplish what was requested because the required functionality is missing or access is restricted.",
"inputSchema": {
@@ -444,6 +503,15 @@ jobs:
"maxLength": 65000
}
}
+ },
+ "upload_asset": {
+ "defaultMax": 10,
+ "fields": {
+ "path": {
+ "required": true,
+ "type": "string"
+ }
+ }
}
}
EOF
@@ -3173,6 +3241,9 @@ jobs:
env:
GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }}
+ GH_AW_ASSETS_BRANCH: ${{ env.GH_AW_ASSETS_BRANCH }}
+ GH_AW_ASSETS_MAX_SIZE_KB: ${{ env.GH_AW_ASSETS_MAX_SIZE_KB }}
+ GH_AW_ASSETS_ALLOWED_EXTS: ${{ env.GH_AW_ASSETS_ALLOWED_EXTS }}
GH_AW_SAFE_INPUTS_PORT: ${{ steps.safe-inputs-start.outputs.port }}
GH_AW_SAFE_INPUTS_API_KEY: ${{ steps.safe-inputs-start.outputs.api_key }}
GH_AW_GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -3382,7 +3453,7 @@ jobs:
event_name: context.eventName,
staged: false,
network_mode: "defaults",
- allowed_domains: ["api.github.com"],
+ allowed_domains: ["api.github.com","defaults","python"],
firewall_enabled: true,
awf_version: "v0.7.0",
steps: {
@@ -3463,22 +3534,268 @@ jobs:
- # Test MCP Gateway: Read Last Issue and Write Poem in Staged Mode
+ # Python Data Visualization Guide
+
+ Python scientific libraries have been installed and are ready for use. A temporary folder structure has been created at `/tmp/gh-aw/python/` for organizing scripts, data, and outputs.
+
+ ## Installed Libraries
+
+ - **NumPy**: Array processing and numerical operations
+ - **Pandas**: Data manipulation and analysis
+ - **Matplotlib**: Chart generation and plotting
+ - **Seaborn**: Statistical data visualization
+ - **SciPy**: Scientific computing utilities
+
+ ## Directory Structure
+
+ ```
+ /tmp/gh-aw/python/
+ ├── data/ # Store all data files here (CSV, JSON, etc.)
+ ├── charts/ # Generated chart images (PNG)
+ ├── artifacts/ # Additional output files
+ └── *.py # Python scripts
+ ```
+
+ ## Data Separation Requirement
+
+ **CRITICAL**: Data must NEVER be inlined in Python code. Always store data in external files and load using pandas.
+
+ ### ❌ PROHIBITED - Inline Data
+ ```python
+ # DO NOT do this
+ data = [10, 20, 30, 40, 50]
+ labels = ['A', 'B', 'C', 'D', 'E']
+ ```
+
+ ### ✅ REQUIRED - External Data Files
+ ```python
+ # Always load data from external files
+ import pandas as pd
+
+ # Load data from CSV
+ data = pd.read_csv('/tmp/gh-aw/python/data/data.csv')
+
+ # Or from JSON
+ data = pd.read_json('/tmp/gh-aw/python/data/data.json')
+ ```
+
+ ## Chart Generation Best Practices
+
+ ### High-Quality Chart Settings
+
+ ```python
+ import matplotlib.pyplot as plt
+ import seaborn as sns
+
+ # Set style for better aesthetics
+ sns.set_style("whitegrid")
+ sns.set_palette("husl")
+
+ # Create figure with high DPI
+ fig, ax = plt.subplots(figsize=(10, 6), dpi=300)
+
+ # Your plotting code here
+ # ...
+
+ # Save with high quality
+ plt.savefig('/tmp/gh-aw/python/charts/chart.png',
+ dpi=300,
+ bbox_inches='tight',
+ facecolor='white',
+ edgecolor='none')
+ ```
+
+ ### Chart Quality Guidelines
+
+ - **DPI**: Use 300 or higher for publication quality
+ - **Figure Size**: Standard is 10x6 inches (adjustable based on needs)
+ - **Labels**: Always include clear axis labels and titles
+ - **Legend**: Add legends when plotting multiple series
+ - **Grid**: Enable grid lines for easier reading
+ - **Colors**: Use colorblind-friendly palettes (seaborn defaults are good)
+
+ ## Including Images in Reports
+
+      When creating reports (issues, discussions, etc.), use the `upload_asset` tool to make images URL-addressable and include them in markdown:
+
+ ### Step 1: Generate and Upload Chart
+ ```python
+ # Generate your chart
+ plt.savefig('/tmp/gh-aw/python/charts/my_chart.png', dpi=300, bbox_inches='tight')
+ ```
+
+ ### Step 2: Upload as Asset
+      Use the `upload_asset` tool to upload the chart file. The tool will return a GitHub raw content URL.
+
+ ### Step 3: Include in Markdown Report
+ When creating your discussion or issue, include the image using markdown:
+
+ ```markdown
+ ## Visualization Results
+
+ 
+
+ The chart above shows...
+ ```
+
+ **Important**: Assets are published to an orphaned git branch and become URL-addressable after workflow completion.
+
+ ## Cache Memory Integration
+
+ The cache memory at `/tmp/gh-aw/cache-memory/` is available for storing reusable code:
+
+ **Helper Functions to Cache:**
+ - Data loading utilities: `data_loader.py`
+ - Chart styling functions: `chart_utils.py`
+ - Common data transformations: `transforms.py`
+
+ **Check Cache Before Creating:**
+ ```bash
+ # Check if helper exists in cache
+ if [ -f /tmp/gh-aw/cache-memory/data_loader.py ]; then
+ cp /tmp/gh-aw/cache-memory/data_loader.py /tmp/gh-aw/python/
+ echo "Using cached data_loader.py"
+ fi
+ ```
+
+ **Save to Cache for Future Runs:**
+ ```bash
+ # Save useful helpers to cache
+ cp /tmp/gh-aw/python/data_loader.py /tmp/gh-aw/cache-memory/
+ echo "Saved data_loader.py to cache for future runs"
+ ```
+
+ ## Complete Example Workflow
+
+ ```python
+ #!/usr/bin/env python3
+ """
+ Example data visualization script
+ Generates a bar chart from external data
+ """
+ import pandas as pd
+ import matplotlib.pyplot as plt
+ import seaborn as sns
+
+ # Set style
+ sns.set_style("whitegrid")
+ sns.set_palette("husl")
+
+ # Load data from external file (NEVER inline)
+ data = pd.read_csv('/tmp/gh-aw/python/data/data.csv')
+
+ # Process data
+ summary = data.groupby('category')['value'].sum()
+
+ # Create chart
+ fig, ax = plt.subplots(figsize=(10, 6), dpi=300)
+ summary.plot(kind='bar', ax=ax)
+
+ # Customize
+ ax.set_title('Data Summary by Category', fontsize=16, fontweight='bold')
+ ax.set_xlabel('Category', fontsize=12)
+ ax.set_ylabel('Value', fontsize=12)
+ ax.grid(True, alpha=0.3)
+
+ # Save chart
+ plt.savefig('/tmp/gh-aw/python/charts/chart.png',
+ dpi=300,
+ bbox_inches='tight',
+ facecolor='white')
+
+ print("Chart saved to /tmp/gh-aw/python/charts/chart.png")
+ ```
+
+ ## Error Handling
+
+ **Check File Existence:**
+ ```python
+ import os
+
+ data_file = '/tmp/gh-aw/python/data/data.csv'
+ if not os.path.exists(data_file):
+ raise FileNotFoundError(f"Data file not found: {data_file}")
+ ```
+
+ **Validate Data:**
+ ```python
+ # Check for required columns
+ required_cols = ['category', 'value']
+ missing = set(required_cols) - set(data.columns)
+ if missing:
+ raise ValueError(f"Missing columns: {missing}")
+ ```
+
+ ## Artifact Upload
+
+ Charts and source files are automatically uploaded as artifacts:
+
+ **Charts Artifact:**
+ - Name: `data-charts`
+ - Contents: PNG files from `/tmp/gh-aw/python/charts/`
+ - Retention: 30 days
+
+ **Source and Data Artifact:**
+ - Name: `python-source-and-data`
+ - Contents: Python scripts and data files
+ - Retention: 30 days
+
+ Both artifacts are uploaded with `if: always()` condition, ensuring they're available even if the workflow fails.
+
+ ## Tips for Success
+
+ 1. **Always Separate Data**: Store data in files, never inline in code
+ 2. **Use Cache Memory**: Store reusable helpers for faster execution
+ 3. **High Quality Charts**: Use DPI 300+ and proper sizing
+ 4. **Clear Documentation**: Add docstrings and comments
+ 5. **Error Handling**: Validate data and check file existence
+ 6. **Type Hints**: Use type annotations for better code quality
+ 7. **Seaborn Defaults**: Leverage seaborn for better aesthetics
+ 8. **Reproducibility**: Set random seeds when needed
+
+ ## Common Data Sources
+
+ Based on common use cases:
+
+ **Repository Statistics:**
+ ```python
+ # Collect via GitHub API, save to data.csv
+ # Then load and visualize
+ data = pd.read_csv('/tmp/gh-aw/python/data/repo_stats.csv')
+ ```
+
+ **Workflow Metrics:**
+ ```python
+ # Collect via GitHub Actions API, save to data.json
+ data = pd.read_json('/tmp/gh-aw/python/data/workflow_metrics.json')
+ ```
+
+ **Sample Data Generation:**
+ ```python
+ # Generate with NumPy, save to file first
+ import numpy as np
+ data = np.random.randn(100, 2)
+ df = pd.DataFrame(data, columns=['x', 'y'])
+ df.to_csv('/tmp/gh-aw/python/data/sample_data.csv', index=False)
+
+ # Then load it back (demonstrating the pattern)
+ data = pd.read_csv('/tmp/gh-aw/python/data/sample_data.csv')
+ ```
+
+ # Test Upload Asset with Python Graph Generation
- Read the most recent issue from the repository and write a creative poem about it in a new issue using **staged mode** (preview mode).
+ Create a dummy graph using Python and matplotlib, then upload it as an asset.
**Requirements:**
- 1. Use the GitHub tools to fetch the most recent issue from this repository
- 2. Read the issue title and body to understand what it's about
- 3. Write a short, creative poem (4-6 lines) inspired by the content of that issue
- 4. Create a new issue with:
- - Title: Start with the prefix "[Poetry Test]" followed by a creative title that relates to the original issue
- - Body: Your poem about the issue, plus a reference to the original issue number
- 5. **IMPORTANT**: Use staged mode (add `staged: true` to your create-issue call) so the issue is previewed with the 🎭 indicator but not actually created
- 6. Confirm that:
- - You successfully read the last issue
- - You created a poem inspired by it
- - The new issue was created in staged mode with the 🎭 indicator
+ 1. Use Python to create a simple graph (e.g., a sine wave or bar chart) using matplotlib
+ 2. Save the graph as a PNG file to /tmp/graph.png
+ 3. Use the `upload_asset` tool to upload the graph
+ 4. The tool should return a URL where the graph can be accessed
+ 5. Create an issue that includes the graph using markdown image syntax
+ 6. Verify that:
+ - The graph file was created successfully
+ - The asset was uploaded and a URL was returned
+ - The issue was created with the embedded graph image
PROMPT_EOF
- name: Append XPIA security instructions to prompt
@@ -3513,6 +3830,31 @@ jobs:
When you need to create temporary files or directories during your work, always use the /tmp/gh-aw/agent/ directory that has been pre-created for you. Do NOT use the root /tmp/ directory directly.
+ PROMPT_EOF
+ - name: Append cache memory instructions to prompt
+ env:
+ GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
+ run: |
+ cat << 'PROMPT_EOF' >> "$GH_AW_PROMPT"
+
+ ---
+
+ ## Cache Folder Available
+
+ You have access to a persistent cache folder at `/tmp/gh-aw/cache-memory/` where you can read and write files to create memories and store information.
+
+ - **Read/Write Access**: You can freely read from and write to any files in this folder
+ - **Persistence**: Files in this folder persist across workflow runs via GitHub Actions cache
+ - **Last Write Wins**: If multiple processes write to the same file, the last write will be preserved
+ - **File Share**: Use this as a simple file share - organize files as you see fit
+
+ Examples of what you can store:
+ - `/tmp/gh-aw/cache-memory/notes.txt` - general notes and observations
+ - `/tmp/gh-aw/cache-memory/preferences.json` - user preferences and settings
+ - `/tmp/gh-aw/cache-memory/history.log` - activity history and logs
+ - `/tmp/gh-aw/cache-memory/state/` - organized state files in subdirectories
+
+ Feel free to create, read, update, and organize files in this folder as needed for your tasks.
PROMPT_EOF
- name: Append safe outputs instructions to prompt
env:
@@ -3527,7 +3869,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_issue, missing_tool, noop
+ **Available tools**: create_issue, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -3813,18 +4155,18 @@ jobs:
- name: Execute GitHub Copilot CLI
id: agentic_execution
# Copilot CLI tool arguments (sorted):
- # --allow-tool github
- # --allow-tool safeinputs
- # --allow-tool safeoutputs
timeout-minutes: 5
run: |
set -o pipefail
sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --mount /tmp:/tmp:rw --mount "${GITHUB_WORKSPACE}:${GITHUB_WORKSPACE}:rw" --mount /usr/bin/date:/usr/bin/date:ro --mount /usr/bin/gh:/usr/bin/gh:ro --mount /usr/bin/yq:/usr/bin/yq:ro --mount /usr/local/bin/copilot:/usr/local/bin/copilot:ro --allow-domains api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --image-tag 0.7.0 \
- -- /usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --allow-tool github --allow-tool safeinputs --allow-tool safeoutputs --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_COPILOT:+ --model "$GH_AW_MODEL_AGENT_COPILOT"} \
+ -- /usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --allow-all-tools --add-dir /tmp/gh-aw/cache-memory/ --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_COPILOT:+ --model "$GH_AW_MODEL_AGENT_COPILOT"} \
2>&1 | tee /tmp/gh-aw/agent-stdio.log
env:
COPILOT_AGENT_RUNNER_TYPE: STANDALONE
COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }}
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg"
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GH_AW_MCP_CONFIG: /home/runner/.copilot/mcp-config.json
GH_AW_MODEL_AGENT_COPILOT: ${{ vars.GH_AW_MODEL_AGENT_COPILOT || '' }}
@@ -6865,6 +7207,19 @@ jobs:
name: agent-stdio.log
path: /tmp/gh-aw/agent-stdio.log
if-no-files-found: warn
+ - name: Upload cache-memory data as artifact
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+ if: always()
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Upload safe outputs assets
+ if: always()
+ uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ if-no-files-found: ignore
- name: Validate agent logs for errors
if: always()
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
@@ -7108,6 +7463,8 @@ jobs:
- agent
- detection
- safe_outputs
+ - update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7645,7 +8002,7 @@ jobs:
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
WORKFLOW_NAME: "Dev"
- WORKFLOW_DESCRIPTION: "Test MCP gateway with issue creation in staged mode"
+ WORKFLOW_DESCRIPTION: "Test upload-asset with Python graph generation"
with:
script: |
const fs = require('fs');
@@ -8540,7 +8897,7 @@ jobs:
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
- GH_AW_ISSUE_TITLE_PREFIX: "[Poetry Test] "
+ GH_AW_ISSUE_TITLE_PREFIX: "[Dev Test] "
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
@@ -8834,3 +9191,249 @@ jobs:
await main();
})();
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: memory-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs:
+ - agent
+ - detection
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+    outputs:
+      branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+      published_count: ${{ steps.upload_assets.outputs.upload_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
+ id: upload_assets
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ env:
+ GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg"
+ GH_AW_WORKFLOW_NAME: "Dev"
+ GH_AW_ENGINE_ID: "copilot"
+ with:
+ github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
+ script: |
+ const fs = require("fs");
+ const path = require("path");
+ const crypto = require("crypto");
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
+ function normalizeBranchName(branchName) {
+ if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
+ return branchName;
+ }
+ let normalized = branchName.replace(/[^a-zA-Z0-9\-_/.]+/g, "-");
+ normalized = normalized.replace(/-+/g, "-");
+ normalized = normalized.replace(/^-+|-+$/g, "");
+ if (normalized.length > 128) {
+ normalized = normalized.substring(0, 128);
+ }
+ normalized = normalized.replace(/-+$/, "");
+ normalized = normalized.toLowerCase();
+ return normalized;
+ }
+ async function main() {
+ const isStaged = process.env.GH_AW_SAFE_OUTPUTS_STAGED === "true";
+ const branchName = process.env.GH_AW_ASSETS_BRANCH;
+ if (!branchName || typeof branchName !== "string") {
+ core.setFailed("GH_AW_ASSETS_BRANCH environment variable is required but not set");
+ return;
+ }
+ const normalizedBranchName = normalizeBranchName(branchName);
+ core.info(`Using assets branch: ${normalizedBranchName}`);
+ const result = loadAgentOutput();
+ if (!result.success) {
+ core.setOutput("upload_count", "0");
+ core.setOutput("branch_name", normalizedBranchName);
+ return;
+ }
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ if (uploadItems.length === 0) {
+ core.info("No upload-asset items found in agent output");
+ core.setOutput("upload_count", "0");
+ core.setOutput("branch_name", normalizedBranchName);
+ return;
+ }
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
+ let uploadCount = 0;
+ let hasChanges = false;
+ try {
+ try {
+ await exec.exec(`git rev-parse --verify origin/${normalizedBranchName}`);
+ await exec.exec(`git checkout -B ${normalizedBranchName} origin/${normalizedBranchName}`);
+ core.info(`Checked out existing branch from origin: ${normalizedBranchName}`);
+ } catch (originError) {
+ if (!normalizedBranchName.startsWith("assets/")) {
+ core.setFailed(
+ `Branch '${normalizedBranchName}' does not start with the required 'assets/' prefix. ` +
+ `Orphaned branches can only be automatically created under the 'assets/' prefix. ` +
+ `Please create the branch manually first, or use a branch name starting with 'assets/'.`
+ );
+ return;
+ }
+ core.info(`Creating new orphaned branch: ${normalizedBranchName}`);
+ await exec.exec(`git checkout --orphan ${normalizedBranchName}`);
+ await exec.exec(`git rm -rf .`);
+ await exec.exec(`git clean -fdx`);
+ }
+ for (const asset of uploadItems) {
+ try {
+ const { fileName, sha, size, targetFileName } = asset;
+ if (!fileName || !sha || !targetFileName) {
+ core.error(`Invalid asset entry missing required fields: ${JSON.stringify(asset)}`);
+ continue;
+ }
+ const assetSourcePath = path.join("/tmp/gh-aw/safeoutputs/assets", fileName);
+ if (!fs.existsSync(assetSourcePath)) {
+ core.warning(`Asset file not found: ${assetSourcePath}`);
+ continue;
+ }
+ const fileContent = fs.readFileSync(assetSourcePath);
+ const computedSha = crypto.createHash("sha256").update(fileContent).digest("hex");
+ if (computedSha !== sha) {
+ core.warning(`SHA mismatch for ${fileName}: expected ${sha}, got ${computedSha}`);
+ continue;
+ }
+ if (fs.existsSync(targetFileName)) {
+ core.info(`Asset ${targetFileName} already exists, skipping`);
+ continue;
+ }
+ fs.copyFileSync(assetSourcePath, targetFileName);
+ await exec.exec(`git add "${targetFileName}"`);
+ uploadCount++;
+ hasChanges = true;
+ core.info(`Added asset: ${targetFileName} (${size} bytes)`);
+ } catch (error) {
+ core.warning(`Failed to process asset ${asset.fileName}: ${error instanceof Error ? error.message : String(error)}`);
+ }
+ }
+ if (hasChanges) {
+ const commitMessage = `[skip-ci] Add ${uploadCount} asset(s)`;
+ await exec.exec(`git`, [`commit`, `-m`, commitMessage]);
+ if (isStaged) {
+ core.summary.addRaw("## Staged Asset Publication");
+ } else {
+ await exec.exec(`git push origin ${normalizedBranchName}`);
+ core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
+ core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
+ }
+ for (const asset of uploadItems) {
+ if (asset.fileName && asset.sha && asset.size && asset.url) {
+ core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
+ }
+ }
+ core.summary.write();
+ } else {
+ core.info("No new assets to upload");
+ }
+ } catch (error) {
+ core.setFailed(`Failed to upload assets: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+ core.setOutput("upload_count", uploadCount.toString());
+ core.setOutput("branch_name", normalizedBranchName);
+ }
+ await main();
+
diff --git a/.github/workflows/dev.md b/.github/workflows/dev.md
index bd673f95997..b5efd5d9f0d 100644
--- a/.github/workflows/dev.md
+++ b/.github/workflows/dev.md
@@ -2,7 +2,7 @@
on:
workflow_dispatch:
name: Dev
-description: Test MCP gateway with issue creation in staged mode
+description: Test upload-asset with Python graph generation
timeout-minutes: 5
strict: true
engine: copilot
@@ -18,27 +18,31 @@ sandbox:
tools:
github:
toolsets: [issues]
+
safe-outputs:
+ upload-asset:
+ allowed-exts: [".png", ".jpg"]
+ max: 5
create-issue:
- title-prefix: "[Poetry Test] "
+ title-prefix: "[Dev Test] "
max: 1
+
imports:
- shared/gh.md
+ - shared/python-dataviz.md
---
-# Test MCP Gateway: Read Last Issue and Write Poem in Staged Mode
+# Test Upload Asset with Python Graph Generation
-Read the most recent issue from the repository and write a creative poem about it in a new issue using **staged mode** (preview mode).
+Create a dummy graph using Python and matplotlib, then upload it as an asset.
**Requirements:**
-1. Use the GitHub tools to fetch the most recent issue from this repository
-2. Read the issue title and body to understand what it's about
-3. Write a short, creative poem (4-6 lines) inspired by the content of that issue
-4. Create a new issue with:
- - Title: Start with the prefix "[Poetry Test]" followed by a creative title that relates to the original issue
- - Body: Your poem about the issue, plus a reference to the original issue number
-5. **IMPORTANT**: Use staged mode (add `staged: true` to your create-issue call) so the issue is previewed with the 🎭 indicator but not actually created
-6. Confirm that:
- - You successfully read the last issue
- - You created a poem inspired by it
- - The new issue was created in staged mode with the 🎭 indicator
+1. Use Python to create a simple graph (e.g., a sine wave or bar chart) using matplotlib
+2. Save the graph as a PNG file to /tmp/graph.png
+3. Use the `upload_asset` tool to upload the graph
+4. The tool should return a URL where the graph can be accessed
+5. Create an issue that includes the graph using markdown image syntax
+6. Verify that:
+ - The graph file was created successfully
+ - The asset was uploaded and a URL was returned
+ - The issue was created with the embedded graph image
diff --git a/.github/workflows/docs-noob-tester.lock.yml b/.github/workflows/docs-noob-tester.lock.yml
index c707d2ad9a9..3050d5c23ce 100644
--- a/.github/workflows/docs-noob-tester.lock.yml
+++ b/.github/workflows/docs-noob-tester.lock.yml
@@ -2228,7 +2228,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_discussion, missing_tool, noop, upload_assets
+ **Available tools**: create_discussion, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -5809,6 +5809,7 @@ jobs:
- agent
- detection
- safe_outputs
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -6574,13 +6575,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_WORKFLOW_ID: "docs-noob-tester"
GH_AW_WORKFLOW_NAME: "Documentation Noob Tester"
@@ -7764,24 +7762,114 @@ jobs:
core.info(`Successfully created ${createdDiscussions.length} discussion(s)`);
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+    outputs:
+      branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+      published_count: ${{ steps.upload_assets.outputs.upload_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Documentation Noob Tester"
+ GH_AW_ENGINE_ID: "copilot"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -7811,16 +7899,14 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
- if (allUploadItems.length === 0) {
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -7842,7 +7928,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -7883,7 +7969,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
@@ -7899,5 +7985,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
+ await main();
diff --git a/.github/workflows/docs-noob-tester.md b/.github/workflows/docs-noob-tester.md
index 3c0a2e689f4..578f79b5f78 100644
--- a/.github/workflows/docs-noob-tester.md
+++ b/.github/workflows/docs-noob-tester.md
@@ -16,7 +16,7 @@ tools:
bash:
- "*"
safe-outputs:
- upload-assets:
+ upload-asset:
create-discussion:
category: "General"
close-older-discussions: true
@@ -186,4 +186,4 @@ You've successfully completed this task if you:
- Navigated at least 5 key documentation pages
- Identified specific pain points with examples
- Provided actionable recommendations
-- Created a discussion with clear findings and screenshots
+- Created a discussion with clear findings and screenshots
\ No newline at end of file
diff --git a/.github/workflows/github-mcp-structural-analysis.lock.yml b/.github/workflows/github-mcp-structural-analysis.lock.yml
index b92a5a28d4d..30ae838bb2f 100644
--- a/.github/workflows/github-mcp-structural-analysis.lock.yml
+++ b/.github/workflows/github-mcp-structural-analysis.lock.yml
@@ -2726,7 +2726,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_discussion, missing_tool, noop, upload_assets
+ **Available tools**: create_discussion, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -5965,6 +5965,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -6736,13 +6737,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "claude"
GH_AW_WORKFLOW_ID: "github-mcp-structural-analysis"
GH_AW_WORKFLOW_NAME: "GitHub MCP Structural Analysis"
@@ -7926,24 +7924,134 @@ jobs:
core.info(`Successfully created ${createdDiscussions.length} discussion(s)`);
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: memory-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+ published_count: ${{ steps.upload_assets.outputs.published_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "GitHub MCP Structural Analysis"
+ GH_AW_ENGINE_ID: "claude"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -7973,16 +8081,14 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
- if (allUploadItems.length === 0) {
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -8004,7 +8110,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8045,7 +8151,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
@@ -8061,25 +8167,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: memory-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/intelligence.lock.yml b/.github/workflows/intelligence.lock.yml
index c865d19c013..55904c9d354 100644
--- a/.github/workflows/intelligence.lock.yml
+++ b/.github/workflows/intelligence.lock.yml
@@ -3209,7 +3209,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_issue, missing_tool, noop, upload_assets
+ **Available tools**: create_issue, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -6806,6 +6806,7 @@ jobs:
- push_repo_memory
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7765,13 +7766,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
issues: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_WORKFLOW_ID: "intelligence"
GH_AW_WORKFLOW_NAME: "Campaign Intelligence System"
@@ -8727,24 +8725,134 @@ jobs:
(async () => {
await main();
})();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: memory-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+ published_count: ${{ steps.upload_assets.outputs.published_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Campaign Intelligence System"
+ GH_AW_ENGINE_ID: "copilot"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -8774,16 +8882,14 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
- if (allUploadItems.length === 0) {
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -8805,7 +8911,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8846,7 +8952,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
@@ -8862,25 +8968,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: memory-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/issue-classifier.lock.yml b/.github/workflows/issue-classifier.lock.yml
index ddfc3f26710..733eb8392fa 100644
--- a/.github/workflows/issue-classifier.lock.yml
+++ b/.github/workflows/issue-classifier.lock.yml
@@ -2999,7 +2999,7 @@ jobs:
path: /tmp/gh-aw/aw_info.json
if-no-files-found: warn
- name: Run AI Inference
- uses: actions/ai-inference@334892bb203895caaed82ec52d23c1ed9385151e # v2.0.4
+ uses: actions/ai-inference@334892bb203895caaed82ec52d23c1ed9385151e # v1
env:
GH_AW_MCP_CONFIG: /tmp/gh-aw/mcp-config/mcp-servers.json
GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt
diff --git a/.github/workflows/org-health-report.lock.yml b/.github/workflows/org-health-report.lock.yml
index 86c8b33e999..60b585324b8 100644
--- a/.github/workflows/org-health-report.lock.yml
+++ b/.github/workflows/org-health-report.lock.yml
@@ -2866,7 +2866,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_discussion, missing_tool, noop, upload_assets
+ **Available tools**: create_discussion, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -6452,6 +6452,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7217,13 +7218,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_WORKFLOW_ID: "org-health-report"
GH_AW_WORKFLOW_NAME: "Organization Health Report"
@@ -8407,24 +8405,134 @@ jobs:
core.info(`Successfully created ${createdDiscussions.length} discussion(s)`);
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: memory-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+ published_count: ${{ steps.upload_assets.outputs.published_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Organization Health Report"
+ GH_AW_ENGINE_ID: "copilot"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -8454,16 +8562,14 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
- if (allUploadItems.length === 0) {
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -8485,7 +8591,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8526,7 +8632,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
@@ -8542,25 +8648,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: memory-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/org-health-report.md b/.github/workflows/org-health-report.md
index 48d8031d93f..bd4d6506daf 100644
--- a/.github/workflows/org-health-report.md
+++ b/.github/workflows/org-health-report.md
@@ -25,7 +25,7 @@ safe-outputs:
category: "reports"
max: 1
close-older-discussions: true
- upload-assets:
+ upload-asset:
timeout-minutes: 60
strict: true
network:
@@ -485,4 +485,4 @@ A successful health report:
- ✅ Publishes report as GitHub Discussion
- ✅ Completes within 60 minute timeout
-Begin the organization health report analysis now. Follow the phases in order, add appropriate delays, and generate a comprehensive report for maintainers.
+Begin the organization health report analysis now. Follow the phases in order, add appropriate delays, and generate a comprehensive report for maintainers.
\ No newline at end of file
diff --git a/.github/workflows/poem-bot.lock.yml b/.github/workflows/poem-bot.lock.yml
index bfe6ae1a7bb..c29dba71ec6 100644
--- a/.github/workflows/poem-bot.lock.yml
+++ b/.github/workflows/poem-bot.lock.yml
@@ -3371,7 +3371,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: add_comment, add_labels, close_pull_request, create_agent_task, create_discussion, create_issue, create_pull_request, create_pull_request_review_comment, link_sub_issue, missing_tool, noop, push_to_pull_request_branch, update_issue, upload_assets
+ **Available tools**: add_comment, add_labels, close_pull_request, create_agent_task, create_discussion, create_issue, create_pull_request, create_pull_request_review_comment, link_sub_issue, missing_tool, noop, push_to_pull_request_branch, update_issue, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -7011,6 +7011,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7988,9 +7989,6 @@ jobs:
pull-requests: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_ENGINE_MODEL: "gpt-5"
GH_AW_SAFE_OUTPUTS_STAGED: "true"
@@ -13262,143 +13260,6 @@ jobs:
await core.summary.addRaw(summaryContent).write();
}
(async () => { await main(); })();
- - name: Upload Assets
- id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
- env:
- GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
- GH_AW_SAFE_OUTPUTS_STAGED: "true"
- with:
- github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
- script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
- const fs = require("fs");
- const path = require("path");
- const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
- function normalizeBranchName(branchName) {
- if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
- return branchName;
- }
- let normalized = branchName.replace(/[^a-zA-Z0-9\-_/.]+/g, "-");
- normalized = normalized.replace(/-+/g, "-");
- normalized = normalized.replace(/^-+|-+$/g, "");
- if (normalized.length > 128) {
- normalized = normalized.substring(0, 128);
- }
- normalized = normalized.replace(/-+$/, "");
- normalized = normalized.toLowerCase();
- return normalized;
- }
- async function main() {
- const isStaged = process.env.GH_AW_SAFE_OUTPUTS_STAGED === "true";
- const branchName = process.env.GH_AW_ASSETS_BRANCH;
- if (!branchName || typeof branchName !== "string") {
- core.setFailed("GH_AW_ASSETS_BRANCH environment variable is required but not set");
- return;
- }
- const normalizedBranchName = normalizeBranchName(branchName);
- core.info(`Using assets branch: ${normalizedBranchName}`);
- const result = loadAgentOutput();
- if (!result.success) {
- core.setOutput("upload_count", "0");
- core.setOutput("branch_name", normalizedBranchName);
- return;
- }
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
- if (allUploadItems.length === 0) {
- core.info("No upload-asset items found in agent output");
- core.setOutput("upload_count", "0");
- core.setOutput("branch_name", normalizedBranchName);
- return;
- }
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
- let uploadCount = 0;
- let hasChanges = false;
- try {
- try {
- await exec.exec(`git rev-parse --verify origin/${normalizedBranchName}`);
- await exec.exec(`git checkout -B ${normalizedBranchName} origin/${normalizedBranchName}`);
- core.info(`Checked out existing branch from origin: ${normalizedBranchName}`);
- } catch (originError) {
- if (!normalizedBranchName.startsWith("assets/")) {
- core.setFailed(
- `Branch '${normalizedBranchName}' does not start with the required 'assets/' prefix. ` +
- `Orphaned branches can only be automatically created under the 'assets/' prefix. ` +
- `Please create the branch manually first, or use a branch name starting with 'assets/'.`
- );
- return;
- }
- core.info(`Creating new orphaned branch: ${normalizedBranchName}`);
- await exec.exec(`git checkout --orphan ${normalizedBranchName}`);
- await exec.exec(`git rm -rf .`);
- await exec.exec(`git clean -fdx`);
- }
- for (const asset of allUploadItems) {
- try {
- const { fileName, sha, size, targetFileName } = asset;
- if (!fileName || !sha || !targetFileName) {
- core.error(`Invalid asset entry missing required fields: ${JSON.stringify(asset)}`);
- continue;
- }
- const assetSourcePath = path.join("/tmp/gh-aw/safeoutputs/assets", fileName);
- if (!fs.existsSync(assetSourcePath)) {
- core.warning(`Asset file not found: ${assetSourcePath}`);
- continue;
- }
- const fileContent = fs.readFileSync(assetSourcePath);
- const computedSha = crypto.createHash("sha256").update(fileContent).digest("hex");
- if (computedSha !== sha) {
- core.warning(`SHA mismatch for ${fileName}: expected ${sha}, got ${computedSha}`);
- continue;
- }
- if (fs.existsSync(targetFileName)) {
- core.info(`Asset ${targetFileName} already exists, skipping`);
- continue;
- }
- fs.copyFileSync(assetSourcePath, targetFileName);
- await exec.exec(`git add "${targetFileName}"`);
- uploadCount++;
- hasChanges = true;
- core.info(`Added asset: ${targetFileName} (${size} bytes)`);
- } catch (error) {
- core.warning(`Failed to process asset ${asset.fileName}: ${error instanceof Error ? error.message : String(error)}`);
- }
- }
- if (hasChanges) {
- const commitMessage = `[skip-ci] Add ${uploadCount} asset(s)`;
- await exec.exec(`git`, [`commit`, `-m`, commitMessage]);
- if (isStaged) {
- core.summary.addRaw("## Staged Asset Publication");
- } else {
- await exec.exec(`git push origin ${normalizedBranchName}`);
- core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
- core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
- }
- for (const asset of allUploadItems) {
- if (asset.fileName && asset.sha && asset.size && asset.url) {
- core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
- }
- }
- core.summary.write();
- } else {
- core.info("No new assets to upload");
- }
- } catch (error) {
- core.setFailed(`Failed to upload assets: ${error instanceof Error ? error.message : String(error)}`);
- return;
- }
- core.setOutput("upload_count", uploadCount.toString());
- core.setOutput("branch_name", normalizedBranchName);
- }
- (async () => { await main(); })();
- name: Link Sub Issue
id: link_sub_issue
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'link_sub_issue'))
@@ -13875,3 +13736,232 @@ jobs:
key: poem-memory-${{ github.workflow }}-${{ github.run_id }}
path: /tmp/gh-aw/cache-memory
+ upload_assets:
+ needs:
+ - agent
+ - detection
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+ published_count: ${{ steps.upload_assets.outputs.published_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
+ id: upload_assets
+ uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
+ env:
+ GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Poem Bot - A Creative Agentic Workflow"
+ GH_AW_ENGINE_ID: "copilot"
+ GH_AW_ENGINE_MODEL: "gpt-5"
+ GH_AW_SAFE_OUTPUTS_STAGED: "true"
+ GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 🪶 *Verses penned by [{workflow_name}]({run_url})*\",\"runStarted\":\"🎭 Hear ye! The muse stirs! [{workflow_name}]({run_url}) takes quill in hand for this {event_type}...\",\"runSuccess\":\"🪶 The poem is writ! [{workflow_name}]({run_url}) has composed verses most fair. Applause! 👏\",\"runFailure\":\"🎭 Alas! [{workflow_name}]({run_url}) {status}. The muse has fled, leaving verses unsung...\"}"
+ with:
+ github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
+ script: |
+ const fs = require("fs");
+ const path = require("path");
+ const crypto = require("crypto");
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
+ function normalizeBranchName(branchName) {
+ if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
+ return branchName;
+ }
+ let normalized = branchName.replace(/[^a-zA-Z0-9\-_/.]+/g, "-");
+ normalized = normalized.replace(/-+/g, "-");
+ normalized = normalized.replace(/^-+|-+$/g, "");
+ if (normalized.length > 128) {
+ normalized = normalized.substring(0, 128);
+ }
+ normalized = normalized.replace(/-+$/, "");
+ normalized = normalized.toLowerCase();
+ return normalized;
+ }
+ async function main() {
+ const isStaged = process.env.GH_AW_SAFE_OUTPUTS_STAGED === "true";
+ const branchName = process.env.GH_AW_ASSETS_BRANCH;
+ if (!branchName || typeof branchName !== "string") {
+ core.setFailed("GH_AW_ASSETS_BRANCH environment variable is required but not set");
+ return;
+ }
+ const normalizedBranchName = normalizeBranchName(branchName);
+ core.info(`Using assets branch: ${normalizedBranchName}`);
+ const result = loadAgentOutput();
+ if (!result.success) {
+ core.setOutput("upload_count", "0");
+ core.setOutput("branch_name", normalizedBranchName);
+ return;
+ }
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ if (uploadItems.length === 0) {
+ core.info("No upload-asset items found in agent output");
+ core.setOutput("upload_count", "0");
+ core.setOutput("branch_name", normalizedBranchName);
+ return;
+ }
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
+ let uploadCount = 0;
+ let hasChanges = false;
+ try {
+ try {
+ await exec.exec(`git rev-parse --verify origin/${normalizedBranchName}`);
+ await exec.exec(`git checkout -B ${normalizedBranchName} origin/${normalizedBranchName}`);
+ core.info(`Checked out existing branch from origin: ${normalizedBranchName}`);
+ } catch (originError) {
+ if (!normalizedBranchName.startsWith("assets/")) {
+ core.setFailed(
+ `Branch '${normalizedBranchName}' does not start with the required 'assets/' prefix. ` +
+ `Orphaned branches can only be automatically created under the 'assets/' prefix. ` +
+ `Please create the branch manually first, or use a branch name starting with 'assets/'.`
+ );
+ return;
+ }
+ core.info(`Creating new orphaned branch: ${normalizedBranchName}`);
+ await exec.exec(`git checkout --orphan ${normalizedBranchName}`);
+ await exec.exec(`git rm -rf .`);
+ await exec.exec(`git clean -fdx`);
+ }
+ for (const asset of uploadItems) {
+ try {
+ const { fileName, sha, size, targetFileName } = asset;
+ if (!fileName || !sha || !targetFileName) {
+ core.error(`Invalid asset entry missing required fields: ${JSON.stringify(asset)}`);
+ continue;
+ }
+ const assetSourcePath = path.join("/tmp/gh-aw/safeoutputs/assets", fileName);
+ if (!fs.existsSync(assetSourcePath)) {
+ core.warning(`Asset file not found: ${assetSourcePath}`);
+ continue;
+ }
+ const fileContent = fs.readFileSync(assetSourcePath);
+ const computedSha = crypto.createHash("sha256").update(fileContent).digest("hex");
+ if (computedSha !== sha) {
+ core.warning(`SHA mismatch for ${fileName}: expected ${sha}, got ${computedSha}`);
+ continue;
+ }
+ if (fs.existsSync(targetFileName)) {
+ core.info(`Asset ${targetFileName} already exists, skipping`);
+ continue;
+ }
+ fs.copyFileSync(assetSourcePath, targetFileName);
+ await exec.exec(`git add "${targetFileName}"`);
+ uploadCount++;
+ hasChanges = true;
+ core.info(`Added asset: ${targetFileName} (${size} bytes)`);
+ } catch (error) {
+ core.warning(`Failed to process asset ${asset.fileName}: ${error instanceof Error ? error.message : String(error)}`);
+ }
+ }
+ if (hasChanges) {
+ const commitMessage = `[skip-ci] Add ${uploadCount} asset(s)`;
+ await exec.exec(`git`, [`commit`, `-m`, commitMessage]);
+ if (isStaged) {
+ core.summary.addRaw("## Staged Asset Publication");
+ } else {
+ await exec.exec(`git push origin ${normalizedBranchName}`);
+ core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
+ core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
+ }
+ for (const asset of uploadItems) {
+ if (asset.fileName && asset.sha && asset.size && asset.url) {
+ core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
+ }
+ }
+ core.summary.write();
+ } else {
+ core.info("No new assets to upload");
+ }
+ } catch (error) {
+ core.setFailed(`Failed to upload assets: ${error instanceof Error ? error.message : String(error)}`);
+ return;
+ }
+ core.setOutput("upload_count", uploadCount.toString());
+ core.setOutput("branch_name", normalizedBranchName);
+ }
+ await main();
+
diff --git a/.github/workflows/poem-bot.md b/.github/workflows/poem-bot.md
index 5f19bc31b9c..9ef6cc10ec0 100644
--- a/.github/workflows/poem-bot.md
+++ b/.github/workflows/poem-bot.md
@@ -119,7 +119,7 @@ safe-outputs:
base: main
# Upload assets
- upload-assets:
+ upload-asset:
# Missing tool reporting
missing-tool:
@@ -180,4 +180,4 @@ Use the safe-outputs capabilities to:
## Begin Your Poetic Journey!
-Examine the current context and create your masterpiece! Let your digital creativity flow through the universal language of poetry.
+Examine the current context and create your masterpiece! Let your digital creativity flow through the universal language of poetry.
\ No newline at end of file
diff --git a/.github/workflows/portfolio-analyst.lock.yml b/.github/workflows/portfolio-analyst.lock.yml
index d7f8bdaa2bd..94c90a48f83 100644
--- a/.github/workflows/portfolio-analyst.lock.yml
+++ b/.github/workflows/portfolio-analyst.lock.yml
@@ -2856,7 +2856,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_discussion, missing_tool, noop, upload_assets
+ **Available tools**: create_discussion, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -6443,6 +6443,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7211,13 +7212,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_TRACKER_ID: "portfolio-analyst-weekly"
GH_AW_WORKFLOW_ID: "portfolio-analyst"
@@ -8402,24 +8400,135 @@ jobs:
core.info(`Successfully created ${createdDiscussions.length} discussion(s)`);
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: trending-data-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+ published_count: ${{ steps.upload_assets.outputs.published_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Automated Portfolio Analyst"
+ GH_AW_TRACKER_ID: "portfolio-analyst-weekly"
+ GH_AW_ENGINE_ID: "copilot"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -8449,16 +8558,14 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
- if (allUploadItems.length === 0) {
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -8480,7 +8587,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8521,7 +8628,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
@@ -8537,25 +8644,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: trending-data-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/portfolio-analyst.md b/.github/workflows/portfolio-analyst.md
index d1f042d09af..6a3b624e9cc 100644
--- a/.github/workflows/portfolio-analyst.md
+++ b/.github/workflows/portfolio-analyst.md
@@ -28,7 +28,7 @@ safe-outputs:
title-prefix: "[portfolio] "
category: "Audits"
close-older-discussions: true
- upload-assets:
+ upload-asset:
timeout-minutes: 20
imports:
- shared/mcp/gh-aw.md
@@ -573,4 +573,4 @@ print("✅ All charts generated")
✅ Healthy workflows are briefly mentioned but not analyzed
✅ All dollar amounts are from actual workflow execution data
-Begin your analysis now. **FIRST**: Generate all 4 required charts from `/tmp/portfolio-logs/summary.json` and upload them as assets. **THEN**: Create the dashboard-style discussion with embedded chart URLs. Read from the pre-downloaded JSON file at `/tmp/portfolio-logs/summary.json` to get real execution data for all workflows. This file contains everything you need: summary metrics and individual run data. DO NOT attempt to call `gh aw logs` or any `gh` commands - they will not work. Move fast, focus on high-impact issues, deliver actionable recommendations based on actual costs, and make the report visual and scannable.
+Begin your analysis now. **FIRST**: Generate all 4 required charts from `/tmp/portfolio-logs/summary.json` and upload them as assets. **THEN**: Create the dashboard-style discussion with embedded chart URLs. Read from the pre-downloaded JSON file at `/tmp/portfolio-logs/summary.json` to get real execution data for all workflows. This file contains everything you need: summary metrics and individual run data. DO NOT attempt to call `gh aw logs` or any `gh` commands - they will not work. Move fast, focus on high-impact issues, deliver actionable recommendations based on actual costs, and make the report visual and scannable.
\ No newline at end of file
diff --git a/.github/workflows/python-data-charts.lock.yml b/.github/workflows/python-data-charts.lock.yml
index be14f4f9c58..cf486d2814f 100644
--- a/.github/workflows/python-data-charts.lock.yml
+++ b/.github/workflows/python-data-charts.lock.yml
@@ -3156,7 +3156,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_discussion, missing_tool, noop, upload_assets
+ **Available tools**: create_discussion, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -6744,6 +6744,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7509,13 +7510,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_WORKFLOW_ID: "python-data-charts"
GH_AW_WORKFLOW_NAME: "Python Data Visualization Generator"
@@ -8699,24 +8697,134 @@ jobs:
core.info(`Successfully created ${createdDiscussions.length} discussion(s)`);
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: memory-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+      published_count: ${{ steps.upload_assets.outputs.upload_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Python Data Visualization Generator"
+ GH_AW_ENGINE_ID: "copilot"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -8746,16 +8854,14 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
- if (allUploadItems.length === 0) {
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -8777,7 +8883,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8818,7 +8924,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
@@ -8834,25 +8940,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: memory-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/python-data-charts.md b/.github/workflows/python-data-charts.md
index 26f2d1a93fe..cea14e444fc 100644
--- a/.github/workflows/python-data-charts.md
+++ b/.github/workflows/python-data-charts.md
@@ -14,7 +14,7 @@ tools:
imports:
- shared/charts-with-trending.md
safe-outputs:
- upload-assets:
+ upload-asset:
create-discussion:
category: "artifacts"
max: 1
@@ -168,5 +168,4 @@ This report contains data visualizations and trending analysis generated using P
- ✅ **High Quality**: Use DPI 300, clear labels, and seaborn styling
- ✅ **Document Cache**: Report on cache status and trending capabilities
-Refer to the Charts with Trending Guide (imported above) for complete examples, trending patterns, cache-memory integration, and best practices.
-
+Refer to the Charts with Trending Guide (imported above) for complete examples, trending patterns, cache-memory integration, and best practices.
diff --git a/.github/workflows/release.lock.yml b/.github/workflows/release.lock.yml
index b6f5612fe73..e854eb4eeac 100644
--- a/.github/workflows/release.lock.yml
+++ b/.github/workflows/release.lock.yml
@@ -6502,13 +6502,13 @@ jobs:
- name: Download Go modules
run: go mod download
- name: Generate SBOM (SPDX format)
- uses: anchore/sbom-action@43a17d6e7add2b5535efe4dcae9952337c479a93 # v0.20.11
+        uses: anchore/sbom-action@43a17d6e7add2b5535efe4dcae9952337c479a93 # v0.20.11
with:
artifact-name: sbom.spdx.json
format: spdx-json
output-file: sbom.spdx.json
- name: Generate SBOM (CycloneDX format)
- uses: anchore/sbom-action@43a17d6e7add2b5535efe4dcae9952337c479a93 # v0.20.11
+        uses: anchore/sbom-action@43a17d6e7add2b5535efe4dcae9952337c479a93 # v0.20.11
with:
artifact-name: sbom.cdx.json
format: cyclonedx-json
@@ -6698,7 +6698,7 @@ jobs:
fetch-depth: 0
persist-credentials: false
- name: Release with gh-extension-precompile
- uses: cli/gh-extension-precompile@9e2237c30f869ad3bcaed6a4be2cd43564dd421b # v2.1.0
+        uses: cli/gh-extension-precompile@9e2237c30f869ad3bcaed6a4be2cd43564dd421b # v2.1.0
with:
build_script_override: scripts/build-release.sh
go_version_file: go.mod
diff --git a/.github/workflows/shared/python-dataviz.md b/.github/workflows/shared/python-dataviz.md
index a7d8a98b73f..e6dfc854730 100644
--- a/.github/workflows/shared/python-dataviz.md
+++ b/.github/workflows/shared/python-dataviz.md
@@ -26,7 +26,7 @@ network:
- python
safe-outputs:
- upload-assets:
+ upload-asset:
steps:
- name: Setup Python environment
diff --git a/.github/workflows/stale-repo-identifier.lock.yml b/.github/workflows/stale-repo-identifier.lock.yml
index 85514f6f4fd..3de2b7f9baa 100644
--- a/.github/workflows/stale-repo-identifier.lock.yml
+++ b/.github/workflows/stale-repo-identifier.lock.yml
@@ -231,7 +231,7 @@ jobs:
ORGANIZATION: ${{ env.ORGANIZATION }}
id: stale-repos
name: Run stale_repos tool
- uses: github/stale-repos@a21e55567b83cf3c3f3f9085d3038dc6cee02598 # v3.0.2
+        uses: github/stale-repos@a21e55567b83cf3c3f3f9085d3038dc6cee02598 # v3.0.2
- env:
INACTIVE_REPOS: ${{ steps.stale-repos.outputs.inactiveRepos }}
name: Save stale repos output
@@ -2889,7 +2889,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_issue, missing_tool, noop, upload_assets
+ **Available tools**: create_issue, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -6478,6 +6478,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7244,13 +7245,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
issues: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 🔍 *Analysis by [{workflow_name}]({run_url})*\",\"runStarted\":\"🔍 Stale Repository Identifier starting! [{workflow_name}]({run_url}) is analyzing repository activity...\",\"runSuccess\":\"✅ Analysis complete! [{workflow_name}]({run_url}) has finished analyzing stale repositories.\",\"runFailure\":\"⚠️ Analysis interrupted! [{workflow_name}]({run_url}) {status}.\"}"
GH_AW_WORKFLOW_ID: "stale-repo-identifier"
@@ -8209,24 +8207,135 @@ jobs:
(async () => {
await main();
})();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: trending-data-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+      published_count: ${{ steps.upload_assets.outputs.upload_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Stale Repository Identifier"
+ GH_AW_ENGINE_ID: "copilot"
+ GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 🔍 *Analysis by [{workflow_name}]({run_url})*\",\"runStarted\":\"🔍 Stale Repository Identifier starting! [{workflow_name}]({run_url}) is analyzing repository activity...\",\"runSuccess\":\"✅ Analysis complete! [{workflow_name}]({run_url}) has finished analyzing stale repositories.\",\"runFailure\":\"⚠️ Analysis interrupted! [{workflow_name}]({run_url}) {status}.\"}"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -8256,16 +8365,14 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
- if (allUploadItems.length === 0) {
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -8287,7 +8394,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8328,7 +8435,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
@@ -8344,25 +8451,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: trending-data-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/stale-repo-identifier.md b/.github/workflows/stale-repo-identifier.md
index aa73bb95465..ff6cc1251b7 100644
--- a/.github/workflows/stale-repo-identifier.md
+++ b/.github/workflows/stale-repo-identifier.md
@@ -36,7 +36,7 @@ safe-outputs:
title-prefix: "[Stale Repository] "
labels: [stale-repository, automated-analysis]
max: 10
- upload-assets:
+ upload-asset:
messages:
footer: "> 🔍 *Analysis by [{workflow_name}]({run_url})*"
run-started: "🔍 Stale Repository Identifier starting! [{workflow_name}]({run_url}) is analyzing repository activity..."
@@ -352,4 +352,4 @@ To avoid GitHub API rate limits:
- Create GitHub issues for repositories needing attention (max 10)
- Print summary statistics to stdout
-- Be clear and actionable in recommendations
+- Be clear and actionable in recommendations
diff --git a/.github/workflows/super-linter.lock.yml b/.github/workflows/super-linter.lock.yml
index 3f60c9a5651..b8ad8e7f5f3 100644
--- a/.github/workflows/super-linter.lock.yml
+++ b/.github/workflows/super-linter.lock.yml
@@ -7546,7 +7546,7 @@ jobs:
persist-credentials: false
- name: Super-linter
id: super-linter
- uses: super-linter/super-linter@47984f49b4e87383eed97890fe2dca6063bbd9c3 # v8.3.1
+        uses: super-linter/super-linter@47984f49b4e87383eed97890fe2dca6063bbd9c3 # v8.3.1
env:
CREATE_LOG_FILE: "true"
DEFAULT_BRANCH: main
diff --git a/.github/workflows/technical-doc-writer.lock.yml b/.github/workflows/technical-doc-writer.lock.yml
index 1bda54f75fe..20888826117 100644
--- a/.github/workflows/technical-doc-writer.lock.yml
+++ b/.github/workflows/technical-doc-writer.lock.yml
@@ -2550,7 +2550,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: add_comment, create_pull_request, missing_tool, noop, upload_assets
+ **Available tools**: add_comment, create_pull_request, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -6148,6 +6148,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -6921,9 +6922,6 @@ jobs:
pull-requests: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 📝 *Documentation by [{workflow_name}]({run_url})*\",\"runStarted\":\"✍️ The Technical Writer begins! [{workflow_name}]({run_url}) is documenting this {event_type}...\",\"runSuccess\":\"📝 Documentation complete! [{workflow_name}]({run_url}) has written the docs. Clear as crystal! ✨\",\"runFailure\":\"✍️ Writer's block! [{workflow_name}]({run_url}) {status}. The page remains blank...\"}"
GH_AW_WORKFLOW_ID: "technical-doc-writer"
@@ -8731,24 +8729,135 @@ jobs:
return createdComments;
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: memory-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+      published_count: ${{ steps.upload_assets.outputs.upload_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Technical Doc Writer"
+ GH_AW_ENGINE_ID: "copilot"
+ GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 📝 *Documentation by [{workflow_name}]({run_url})*\",\"runStarted\":\"✍️ The Technical Writer begins! [{workflow_name}]({run_url}) is documenting this {event_type}...\",\"runSuccess\":\"📝 Documentation complete! [{workflow_name}]({run_url}) has written the docs. Clear as crystal! ✨\",\"runFailure\":\"✍️ Writer's block! [{workflow_name}]({run_url}) {status}. The page remains blank...\"}"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -8778,16 +8887,14 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
- if (allUploadItems.length === 0) {
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -8809,7 +8916,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8850,7 +8957,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
@@ -8866,25 +8973,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: memory-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/technical-doc-writer.md b/.github/workflows/technical-doc-writer.md
index 3ad96a21270..1843a973ed2 100644
--- a/.github/workflows/technical-doc-writer.md
+++ b/.github/workflows/technical-doc-writer.md
@@ -34,7 +34,7 @@ safe-outputs:
labels: [documentation]
reviewers: copilot
draft: false
- upload-assets:
+ upload-asset:
messages:
footer: "> 📝 *Documentation by [{workflow_name}]({run_url})*"
run-started: "✍️ The Technical Writer begins! [{workflow_name}]({run_url}) is documenting this {event_type}..."
@@ -106,4 +106,4 @@ When reviewing documentation for the specified topic in the **docs/** folder, ap
Keep your feedback specific, actionable, and empathetic. Focus on the most impactful improvements for the topic: "${{ github.event.inputs.topic }}"
-You have access to cache-memory for persistent storage across runs, which you can use to track documentation patterns and improvement suggestions.
+You have access to cache-memory for persistent storage across runs, which you can use to track documentation patterns and improvement suggestions.
\ No newline at end of file
diff --git a/.github/workflows/unbloat-docs.lock.yml b/.github/workflows/unbloat-docs.lock.yml
index 420094d182a..a3615e98200 100644
--- a/.github/workflows/unbloat-docs.lock.yml
+++ b/.github/workflows/unbloat-docs.lock.yml
@@ -2834,7 +2834,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: add_comment, create_pull_request, missing_tool, noop, upload_assets
+ **Available tools**: add_comment, create_pull_request, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -6141,6 +6141,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7126,9 +7127,6 @@ jobs:
pull-requests: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "claude"
GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 🗜️ *Compressed by [{workflow_name}]({run_url})*\",\"runStarted\":\"📦 Time to slim down! [{workflow_name}]({run_url}) is trimming the excess from this {event_type}...\",\"runSuccess\":\"🗜️ Docs on a diet! [{workflow_name}]({run_url}) has removed the bloat. Lean and mean! 💪\",\"runFailure\":\"📦 Unbloating paused! [{workflow_name}]({run_url}) {status}. The docs remain... fluffy.\"}"
GH_AW_WORKFLOW_ID: "unbloat-docs"
@@ -8938,24 +8936,135 @@ jobs:
return createdComments;
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: memory-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+        published_count: ${{ steps.upload_assets.outputs.upload_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Documentation Unbloat"
+ GH_AW_ENGINE_ID: "claude"
+ GH_AW_SAFE_OUTPUT_MESSAGES: "{\"footer\":\"\\u003e 🗜️ *Compressed by [{workflow_name}]({run_url})*\",\"runStarted\":\"📦 Time to slim down! [{workflow_name}]({run_url}) is trimming the excess from this {event_type}...\",\"runSuccess\":\"🗜️ Docs on a diet! [{workflow_name}]({run_url}) has removed the bloat. Lean and mean! 💪\",\"runFailure\":\"📦 Unbloating paused! [{workflow_name}]({run_url}) {status}. The docs remain... fluffy.\"}"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -8985,16 +9094,14 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
- if (allUploadItems.length === 0) {
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -9016,7 +9123,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -9057,7 +9164,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
@@ -9073,25 +9180,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: memory-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/unbloat-docs.md b/.github/workflows/unbloat-docs.md
index c7060604b8c..897ec0f7feb 100644
--- a/.github/workflows/unbloat-docs.md
+++ b/.github/workflows/unbloat-docs.md
@@ -69,7 +69,7 @@ safe-outputs:
draft: true
add-comment:
max: 1
- upload-assets:
+ upload-asset:
messages:
footer: "> 🗜️ *Compressed by [{workflow_name}]({run_url})*"
run-started: "📦 Time to slim down! [{workflow_name}]({run_url}) is trimming the excess from this {event_type}..."
@@ -338,4 +338,4 @@ A successful run:
- ✅ Includes HD screenshots (1920x1080) of the modified documentation page(s) in the Astro Starlight website
- ✅ Reports any blocked domains for CSS/fonts (if encountered)
-Begin by scanning the docs directory and selecting the best candidate for improvement!
+Begin by scanning the docs directory and selecting the best candidate for improvement!
\ No newline at end of file
diff --git a/.github/workflows/weekly-issue-summary.lock.yml b/.github/workflows/weekly-issue-summary.lock.yml
index 632cf88ad20..7f6a36c37f3 100644
--- a/.github/workflows/weekly-issue-summary.lock.yml
+++ b/.github/workflows/weekly-issue-summary.lock.yml
@@ -2659,7 +2659,7 @@ jobs:
To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls.
- **Available tools**: create_discussion, missing_tool, noop, upload_assets
+ **Available tools**: create_discussion, missing_tool, noop, upload_asset
**Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped.
@@ -6246,6 +6246,7 @@ jobs:
- detection
- safe_outputs
- update_cache_memory
+ - upload_assets
if: (always()) && (needs.agent.result != 'skipped')
runs-on: ubuntu-slim
permissions:
@@ -7014,13 +7015,10 @@ jobs:
if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true')
runs-on: ubuntu-slim
permissions:
- contents: write
+ contents: read
discussions: write
timeout-minutes: 15
env:
- GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
- GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
- GH_AW_ASSETS_MAX_SIZE_KB: 10240
GH_AW_ENGINE_ID: "copilot"
GH_AW_TRACKER_ID: "weekly-issue-summary"
GH_AW_WORKFLOW_ID: "weekly-issue-summary"
@@ -8205,24 +8203,135 @@ jobs:
core.info(`Successfully created ${createdDiscussions.length} discussion(s)`);
}
(async () => { await main(); })();
- - name: Upload Assets
+
+ update_cache_memory:
+ needs:
+ - agent
+ - detection
+ if: always() && needs.detection.outputs.success == 'true'
+ runs-on: ubuntu-latest
+ permissions: {}
+ steps:
+ - name: Download cache-memory artifact (default)
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ continue-on-error: true
+ with:
+ name: cache-memory
+ path: /tmp/gh-aw/cache-memory
+ - name: Save cache-memory to cache (default)
+ uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+ with:
+ key: memory-${{ github.workflow }}-${{ github.run_id }}
+ path: /tmp/gh-aw/cache-memory
+
+ upload_assets:
+ needs: agent
+ if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
+ runs-on: ubuntu-slim
+ permissions:
+ contents: write
+ timeout-minutes: 10
+ outputs:
+ branch_name: ${{ steps.upload_assets.outputs.branch_name }}
+        published_count: ${{ steps.upload_assets.outputs.upload_count }}
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+ - name: Configure Git credentials
+ env:
+ REPO_NAME: ${{ github.repository }}
+ SERVER_URL: ${{ github.server_url }}
+ run: |
+ git config --global user.email "github-actions[bot]@users.noreply.github.com"
+ git config --global user.name "github-actions[bot]"
+ # Re-authenticate git with GitHub token
+ SERVER_URL_STRIPPED="${SERVER_URL#https://}"
+ git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git"
+ echo "Git configured with standard GitHub Actions identity"
+ - name: Download assets
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: safe-outputs-assets
+ path: /tmp/gh-aw/safeoutputs/assets/
+ - name: List downloaded asset files
+ continue-on-error: true
+ run: |
+ echo "Downloaded asset files:"
+ ls -la /tmp/gh-aw/safeoutputs/assets/
+ - name: Download agent output artifact
+ continue-on-error: true
+ uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+ with:
+ name: agent_output.json
+ path: /tmp/gh-aw/safeoutputs/
+ - name: Setup agent output environment variable
+ run: |
+ mkdir -p /tmp/gh-aw/safeoutputs/
+ find "/tmp/gh-aw/safeoutputs/" -type f -print
+ echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV"
+ - name: Upload Assets to Orphaned Branch
id: upload_assets
- if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (contains(needs.agent.outputs.output_types, 'upload_asset'))
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }}
+ GH_AW_ASSETS_BRANCH: "assets/${{ github.workflow }}"
+ GH_AW_ASSETS_MAX_SIZE_KB: 10240
+ GH_AW_ASSETS_ALLOWED_EXTS: ".png,.jpg,.jpeg"
+ GH_AW_WORKFLOW_NAME: "Weekly Issue Summary"
+ GH_AW_TRACKER_ID: "weekly-issue-summary"
+ GH_AW_ENGINE_ID: "copilot"
with:
github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }}
script: |
- globalThis.github = github;
- globalThis.context = context;
- globalThis.core = core;
- globalThis.exec = exec;
- globalThis.io = io;
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
- const { loadAgentOutput } = require('/tmp/gh-aw/scripts/load_agent_output.cjs');
+ const MAX_LOG_CONTENT_LENGTH = 10000;
+ function truncateForLogging(content) {
+ if (content.length <= MAX_LOG_CONTENT_LENGTH) {
+ return content;
+ }
+ return content.substring(0, MAX_LOG_CONTENT_LENGTH) + `\n... (truncated, total length: ${content.length})`;
+ }
+ function loadAgentOutput() {
+ const agentOutputFile = process.env.GH_AW_AGENT_OUTPUT;
+ if (!agentOutputFile) {
+ core.info("No GH_AW_AGENT_OUTPUT environment variable found");
+ return { success: false };
+ }
+ let outputContent;
+ try {
+ outputContent = fs.readFileSync(agentOutputFile, "utf8");
+ } catch (error) {
+ const errorMessage = `Error reading agent output file: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ return { success: false, error: errorMessage };
+ }
+ if (outputContent.trim() === "") {
+ core.info("Agent output content is empty");
+ return { success: false };
+ }
+ core.info(`Agent output content length: ${outputContent.length}`);
+ let validatedOutput;
+ try {
+ validatedOutput = JSON.parse(outputContent);
+ } catch (error) {
+ const errorMessage = `Error parsing agent output JSON: ${error instanceof Error ? error.message : String(error)}`;
+ core.error(errorMessage);
+ core.info(`Failed to parse content:\n${truncateForLogging(outputContent)}`);
+ return { success: false, error: errorMessage };
+ }
+ if (!validatedOutput.items || !Array.isArray(validatedOutput.items)) {
+ core.info("No valid items found in agent output");
+ core.info(`Parsed content: ${truncateForLogging(JSON.stringify(validatedOutput))}`);
+ return { success: false };
+ }
+ return { success: true, items: validatedOutput.items };
+ }
function normalizeBranchName(branchName) {
if (!branchName || typeof branchName !== "string" || branchName.trim() === "") {
return branchName;
@@ -8252,16 +8361,14 @@ jobs:
core.setOutput("branch_name", normalizedBranchName);
return;
}
- const uploadItems = result.items.filter( item => item.type === "upload_assets");
- const uploadAssetItems = result.items.filter( item => item.type === "upload_asset");
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
- if (allUploadItems.length === 0) {
+ const uploadItems = result.items.filter( item => item.type === "upload_asset");
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
try {
@@ -8283,7 +8390,7 @@ jobs:
await exec.exec(`git rm -rf .`);
await exec.exec(`git clean -fdx`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
if (!fileName || !sha || !targetFileName) {
@@ -8324,7 +8431,7 @@ jobs:
core.summary.addRaw("## Assets").addRaw(`Successfully uploaded **${uploadCount}** assets to branch \`${normalizedBranchName}\``).addRaw("");
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
@@ -8340,25 +8447,5 @@ jobs:
core.setOutput("upload_count", uploadCount.toString());
core.setOutput("branch_name", normalizedBranchName);
}
- (async () => { await main(); })();
-
- update_cache_memory:
- needs:
- - agent
- - detection
- if: always() && needs.detection.outputs.success == 'true'
- runs-on: ubuntu-latest
- permissions: {}
- steps:
- - name: Download cache-memory artifact (default)
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
- continue-on-error: true
- with:
- name: cache-memory
- path: /tmp/gh-aw/cache-memory
- - name: Save cache-memory to cache (default)
- uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
- with:
- key: memory-${{ github.workflow }}-${{ github.run_id }}
- path: /tmp/gh-aw/cache-memory
+ await main();
diff --git a/.github/workflows/weekly-issue-summary.md b/.github/workflows/weekly-issue-summary.md
index d1187afa0ce..2e10242cc6d 100644
--- a/.github/workflows/weekly-issue-summary.md
+++ b/.github/workflows/weekly-issue-summary.md
@@ -25,7 +25,7 @@ tools:
toolsets:
- issues
safe-outputs:
- upload-assets:
+ upload-asset:
create-discussion:
title-prefix: "[Weekly Summary] "
category: "Audits"
diff --git a/docs/slides/index.md b/docs/slides/index.md
index 5cabb34bbdf..b5ee7536ec2 100644
--- a/docs/slides/index.md
+++ b/docs/slides/index.md
@@ -320,7 +320,7 @@ tools:
playwright: # Headless browser automation
safe-outputs:
create-issue:
- upload-assets: # Attach screenshots to artifacts
+ upload-asset: # Attach screenshots to artifacts
---
Test the web application:
1. Navigate to the deployed preview URL
diff --git a/docs/src/content/docs/reference/frontmatter-full.md b/docs/src/content/docs/reference/frontmatter-full.md
index edcd93074bd..5fc69782b9d 100644
--- a/docs/src/content/docs/reference/frontmatter-full.md
+++ b/docs/src/content/docs/reference/frontmatter-full.md
@@ -2629,7 +2629,7 @@ safe-outputs:
# This field supports multiple formats (oneOf):
# Option 1: Configuration for publishing assets to an orphaned git branch
- upload-assets:
+ upload-asset:
# Branch name (default: 'assets/${{ github.workflow }}')
# (optional)
branch: "example-value"
@@ -2653,7 +2653,7 @@ safe-outputs:
github-token: "${{ secrets.GITHUB_TOKEN }}"
# Option 2: Enable asset publishing with default configuration
- upload-assets: null
+ upload-asset: null
# (optional)
# This field supports multiple formats (oneOf):
diff --git a/docs/src/content/docs/reference/glossary.md b/docs/src/content/docs/reference/glossary.md
index ae941ac5800..85842673cb4 100644
--- a/docs/src/content/docs/reference/glossary.md
+++ b/docs/src/content/docs/reference/glossary.md
@@ -106,7 +106,7 @@ A safe output capability that allows workflows to upload generated files (screen
```yaml
safe-outputs:
- upload-assets:
+ upload-asset:
branch: "assets/my-workflow" # branch name (default: "assets/${{ github.workflow }}")
max-size: 10240 # max file size in KB (default: 10MB)
allowed-exts: [.png, .jpg, .svg] # allowed extensions
diff --git a/docs/src/content/docs/reference/safe-outputs.md b/docs/src/content/docs/reference/safe-outputs.md
index c7780c28886..e153c9578c6 100644
--- a/docs/src/content/docs/reference/safe-outputs.md
+++ b/docs/src/content/docs/reference/safe-outputs.md
@@ -57,7 +57,7 @@ This declares that the workflow should create at most one new issue. The AI agen
| [**Assign to User**](#assign-to-user-assign-to-user) | `assign-to-user:` | Assign users to issues | 1 | ✅ |
| [**Push to PR Branch**](#push-to-pr-branch-push-to-pull-request-branch) | `push-to-pull-request-branch:` | Push changes to PR branch | 1 | ❌ |
| [**Update Release**](#release-updates-update-release) | `update-release:` | Update GitHub release descriptions | 1 | ✅ |
-| [**Upload Assets**](#asset-uploads-upload-assets) | `upload-assets:` | Upload files to orphaned git branch | 10 | ❌ |
+| [**Upload Assets**](#asset-uploads-upload-asset) | `upload-asset:` | Upload files to orphaned git branch | 10 | ❌ |
| [**Code Scanning Alerts**](#code-scanning-alerts-create-code-scanning-alert) | `create-code-scanning-alert:` | Generate SARIF security advisories | unlimited | ❌ |
| [**No-Op**](#no-op-logging-noop) | `noop:` | Log completion message for transparency (auto-enabled) | 1 | ❌ |
| [**Missing Tool**](#missing-tool-reporting-missing-tool) | `missing-tool:` | Report missing tools (auto-enabled) | unlimited | ❌ |
@@ -462,7 +462,7 @@ safe-outputs:
Agent output format: `{"type": "update_release", "tag": "v1.0.0", "operation": "replace", "body": "..."}`. The `tag` field is optional for release events (inferred from context). Workflow needs read access; only the generated job receives write permissions.
-### Asset Uploads (`upload-assets:`)
+### Asset Uploads (`upload-asset:`)
Uploads generated files (screenshots, charts, reports, diagrams) to an orphaned git branch for persistent, version-controlled storage. Assets are uploaded without requiring elevated permissions during agent execution—a separate job with `contents: write` handles the actual commit and push.
@@ -475,7 +475,7 @@ Uploads generated files (screenshots, charts, reports, diagrams) to an orphaned
```yaml wrap
safe-outputs:
- upload-assets:
+ upload-asset:
branch: "assets/my-workflow" # branch name (default: `"assets/${{ github.workflow }}"`)
max-size: 5120 # max file size in KB (default: 10240 = 10MB)
allowed-exts: [.png, .jpg, .svg] # allowed extensions (default: [.png, .jpg, .jpeg])
@@ -567,7 +567,7 @@ on: schedule
tools:
playwright:
safe-outputs:
- upload-assets:
+ upload-asset:
branch: "assets/screenshots"
allowed-exts: [.png]
max: 50
@@ -587,7 +587,7 @@ on: schedule
tools:
bash:
safe-outputs:
- upload-assets:
+ upload-asset:
branch: "assets/charts"
allowed-exts: [.png, .svg]
max-size: 2048
diff --git a/pkg/cli/fix_codemods.go b/pkg/cli/fix_codemods.go
index eb334adc17b..afb744e141b 100644
--- a/pkg/cli/fix_codemods.go
+++ b/pkg/cli/fix_codemods.go
@@ -45,6 +45,7 @@ func GetAllCodemods() []Codemod {
getNetworkFirewallCodemod(),
getCommandToSlashCommandCodemod(),
getSafeInputsModeCodemod(),
+ getUploadAssetsCodemod(),
}
}
@@ -488,3 +489,101 @@ func getSafeInputsModeCodemod() Codemod {
},
}
}
+
+// getUploadAssetsCodemod creates a codemod for migrating upload-assets to upload-asset (plural to singular)
+func getUploadAssetsCodemod() Codemod {
+ return Codemod{
+ ID: "upload-assets-to-upload-asset-migration",
+ Name: "Migrate upload-assets to upload-asset",
+ Description: "Replaces deprecated 'safe-outputs.upload-assets' field with 'safe-outputs.upload-asset' (plural to singular)",
+ IntroducedIn: "0.3.0",
+ Apply: func(content string, frontmatter map[string]any) (string, bool, error) {
+ // Check if safe-outputs.upload-assets exists
+ safeOutputsValue, hasSafeOutputs := frontmatter["safe-outputs"]
+ if !hasSafeOutputs {
+ return content, false, nil
+ }
+
+ safeOutputsMap, ok := safeOutputsValue.(map[string]any)
+ if !ok {
+ return content, false, nil
+ }
+
+ // Check if upload-assets field exists in safe-outputs (plural is deprecated)
+ _, hasUploadAssets := safeOutputsMap["upload-assets"]
+ if !hasUploadAssets {
+ return content, false, nil
+ }
+
+ // Parse frontmatter to get raw lines
+ result, err := parser.ExtractFrontmatterFromContent(content)
+ if err != nil {
+ return content, false, fmt.Errorf("failed to parse frontmatter: %w", err)
+ }
+
+ // Find and replace upload-assets with upload-asset within the safe-outputs block
+ var modified bool
+ var inSafeOutputsBlock bool
+ var safeOutputsIndent string
+
+ frontmatterLines := make([]string, len(result.FrontmatterLines))
+
+ for i, line := range result.FrontmatterLines {
+ trimmedLine := strings.TrimSpace(line)
+
+ // Track if we're in the safe-outputs block
+ if strings.HasPrefix(trimmedLine, "safe-outputs:") {
+ inSafeOutputsBlock = true
+ safeOutputsIndent = line[:len(line)-len(strings.TrimLeft(line, " \t"))]
+ frontmatterLines[i] = line
+ continue
+ }
+
+ // Check if we've left the safe-outputs block (new top-level key with same or less indentation)
+ if inSafeOutputsBlock && len(trimmedLine) > 0 && !strings.HasPrefix(trimmedLine, "#") {
+ currentIndent := line[:len(line)-len(strings.TrimLeft(line, " \t"))]
+ if len(currentIndent) <= len(safeOutputsIndent) && strings.Contains(line, ":") {
+ inSafeOutputsBlock = false
+ }
+ }
+
+ // Replace upload-assets with upload-asset if in safe-outputs block
+ if inSafeOutputsBlock && strings.HasPrefix(trimmedLine, "upload-assets:") {
+ // Preserve indentation
+ leadingSpace := line[:len(line)-len(strings.TrimLeft(line, " \t"))]
+
+ // Extract the value and any trailing comment
+ parts := strings.SplitN(line, ":", 2)
+ if len(parts) >= 2 {
+ valueAndComment := parts[1]
+ frontmatterLines[i] = fmt.Sprintf("%supload-asset:%s", leadingSpace, valueAndComment)
+ modified = true
+ codemodsLog.Printf("Replaced safe-outputs.upload-assets with safe-outputs.upload-asset on line %d", i+1)
+ } else {
+ frontmatterLines[i] = line
+ }
+ } else {
+ frontmatterLines[i] = line
+ }
+ }
+
+ if !modified {
+ return content, false, nil
+ }
+
+ // Reconstruct the content
+ var lines []string
+ lines = append(lines, "---")
+ lines = append(lines, frontmatterLines...)
+ lines = append(lines, "---")
+ if result.Markdown != "" {
+ lines = append(lines, "")
+ lines = append(lines, result.Markdown)
+ }
+
+ newContent := strings.Join(lines, "\n")
+ codemodsLog.Print("Applied upload-assets to upload-asset migration")
+ return newContent, true, nil
+ },
+ }
+}
diff --git a/pkg/cli/workflows/test-claude-playwright-screenshots.md b/pkg/cli/workflows/test-claude-playwright-screenshots.md
index 21c354e062f..d6e5f4612d2 100644
--- a/pkg/cli/workflows/test-claude-playwright-screenshots.md
+++ b/pkg/cli/workflows/test-claude-playwright-screenshots.md
@@ -8,7 +8,7 @@ on:
- pelikhan/*
safe-outputs:
staged: true
- upload-assets:
+ upload-asset:
create-issue:
title-prefix: "[docs] "
engine:
diff --git a/pkg/cli/workflows/test-copilot-playwright-screenshots.md b/pkg/cli/workflows/test-copilot-playwright-screenshots.md
index 5ed7bcc476a..fe4be9d0294 100644
--- a/pkg/cli/workflows/test-copilot-playwright-screenshots.md
+++ b/pkg/cli/workflows/test-copilot-playwright-screenshots.md
@@ -8,7 +8,7 @@ on:
- pelikhan/*
safe-outputs:
staged: true
- upload-assets:
+ upload-asset:
create-issue:
title-prefix: "[docs] "
engine:
diff --git a/pkg/cli/workflows/test-playwright-args.md b/pkg/cli/workflows/test-playwright-args.md
index 746421cd86a..cade0161fd6 100644
--- a/pkg/cli/workflows/test-playwright-args.md
+++ b/pkg/cli/workflows/test-playwright-args.md
@@ -5,7 +5,7 @@ permissions: read-all
engine: claude
safe-outputs:
staged: true
- upload-assets:
+ upload-asset:
create-issue:
title-prefix: "[test] "
tools:
diff --git a/pkg/parser/schemas/main_workflow_schema.json b/pkg/parser/schemas/main_workflow_schema.json
index 56aacaef4a7..71a285db04c 100644
--- a/pkg/parser/schemas/main_workflow_schema.json
+++ b/pkg/parser/schemas/main_workflow_schema.json
@@ -4200,7 +4200,7 @@
}
]
},
- "upload-assets": {
+ "upload-asset": {
"oneOf": [
{
"type": "object",
diff --git a/pkg/workflow/branch_normalize_integration_test.go b/pkg/workflow/branch_normalize_integration_test.go
index 54e3b51daa8..d49c3168440 100644
--- a/pkg/workflow/branch_normalize_integration_test.go
+++ b/pkg/workflow/branch_normalize_integration_test.go
@@ -105,7 +105,7 @@ func TestUploadAssetsJobHasInlinedNormalization(t *testing.T) {
}
// Build the upload_assets job
- job, err := compiler.buildUploadAssetsJob(data, "agent")
+ job, err := compiler.buildUploadAssetsJob(data, "agent", false)
if err != nil {
t.Fatalf("Failed to build upload_assets job: %v", err)
}
diff --git a/pkg/workflow/checkout_persist_credentials_test.go b/pkg/workflow/checkout_persist_credentials_test.go
index 2151f776fd5..89af95635a2 100644
--- a/pkg/workflow/checkout_persist_credentials_test.go
+++ b/pkg/workflow/checkout_persist_credentials_test.go
@@ -99,7 +99,7 @@ permissions:
issues: read
pull-requests: read
safe-outputs:
- upload-assets:
+ upload-asset:
engine: claude
strict: false
---`,
diff --git a/pkg/workflow/compiler_safe_output_jobs.go b/pkg/workflow/compiler_safe_output_jobs.go
index 832681fc03a..376707756b5 100644
--- a/pkg/workflow/compiler_safe_output_jobs.go
+++ b/pkg/workflow/compiler_safe_output_jobs.go
@@ -63,6 +63,24 @@ func (c *Compiler) buildSafeOutputsJobs(data *WorkflowData, jobName, markdownPat
safeOutputJobNames = append(safeOutputJobNames, safeJobNames...)
compilerSafeOutputJobsLog.Printf("Added %d custom safe-job names to conclusion dependencies", len(safeJobNames))
+ // Build upload_assets job as a separate job if configured
+ // This needs to be separate from the consolidated safe_outputs job because it requires:
+ // 1. Git configuration for pushing to orphaned branches
+ // 2. Checkout with proper credentials
+ // 3. Different permissions (contents: write)
+ if data.SafeOutputs != nil && data.SafeOutputs.UploadAssets != nil {
+ compilerSafeOutputJobsLog.Print("Building separate upload_assets job")
+ uploadAssetsJob, err := c.buildUploadAssetsJob(data, jobName, threatDetectionEnabled)
+ if err != nil {
+ return fmt.Errorf("failed to build upload_assets job: %w", err)
+ }
+ if err := c.jobManager.AddJob(uploadAssetsJob); err != nil {
+ return fmt.Errorf("failed to add upload_assets job: %w", err)
+ }
+ safeOutputJobNames = append(safeOutputJobNames, uploadAssetsJob.Name)
+ compilerSafeOutputJobsLog.Printf("Added separate upload_assets job")
+ }
+
// Build conclusion job if add-comment is configured OR if command trigger is configured with reactions
// This job runs last, after all safe output jobs (and push_repo_memory if configured), to update the activation comment on failure
// The buildConclusionJob function itself will decide whether to create the job based on the configuration
diff --git a/pkg/workflow/compiler_safe_outputs_core.go b/pkg/workflow/compiler_safe_outputs_core.go
index c351100952b..377612e8e20 100644
--- a/pkg/workflow/compiler_safe_outputs_core.go
+++ b/pkg/workflow/compiler_safe_outputs_core.go
@@ -2,7 +2,6 @@ package workflow
import (
"fmt"
- "strings"
"github.com/githubnext/gh-aw/pkg/constants"
"github.com/githubnext/gh-aw/pkg/logger"
@@ -112,9 +111,8 @@ func (c *Compiler) buildConsolidatedSafeOutputsJob(data *WorkflowData, mainJobNa
if data.SafeOutputs.PushToPullRequestBranch != nil {
scriptNames = append(scriptNames, "push_to_pull_request_branch")
}
- if data.SafeOutputs.UploadAssets != nil {
- scriptNames = append(scriptNames, "upload_assets")
- }
+ // Upload Assets is handled as a separate job (not in consolidated job)
+ // See buildUploadAssetsJob for the separate job implementation
if data.SafeOutputs.UpdateRelease != nil {
scriptNames = append(scriptNames, "update_release")
}
@@ -397,15 +395,9 @@ func (c *Compiler) buildConsolidatedSafeOutputsJob(data *WorkflowData, mainJobNa
permissions.Merge(NewPermissionsContentsWriteIssuesWritePRWrite())
}
- // 18. Upload Assets step
- if data.SafeOutputs.UploadAssets != nil {
- stepConfig := c.buildUploadAssetsStepConfig(data, mainJobName, threatDetectionEnabled)
- stepYAML := c.buildConsolidatedSafeOutputStep(data, stepConfig)
- steps = append(steps, stepYAML...)
- safeOutputStepNames = append(safeOutputStepNames, stepConfig.StepID)
-
- permissions.Merge(NewPermissionsContentsWrite())
- }
+ // 18. Upload Assets - now handled as a separate job (see buildSafeOutputsJobs)
+ // This was moved out of the consolidated job to allow proper git configuration
+ // for pushing to orphaned branches
// 19. Update Release step
if data.SafeOutputs.UpdateRelease != nil {
@@ -655,12 +647,8 @@ func (c *Compiler) buildJobLevelSafeOutputEnvVars(data *WorkflowData, workflowID
}
}
- // Add asset upload configuration if present (applies to all steps)
- if data.SafeOutputs.UploadAssets != nil {
- envVars["GH_AW_ASSETS_BRANCH"] = fmt.Sprintf("%q", data.SafeOutputs.UploadAssets.BranchName)
- envVars["GH_AW_ASSETS_MAX_SIZE_KB"] = fmt.Sprintf("%d", data.SafeOutputs.UploadAssets.MaxSizeKB)
- envVars["GH_AW_ASSETS_ALLOWED_EXTS"] = fmt.Sprintf("%q", strings.Join(data.SafeOutputs.UploadAssets.AllowedExts, ","))
- }
+ // Note: Asset upload configuration is not needed here because upload_assets
+ // is now handled as a separate job (see buildUploadAssetsJob)
return envVars
}
diff --git a/pkg/workflow/compiler_safe_outputs_shared.go b/pkg/workflow/compiler_safe_outputs_shared.go
index 3651364d72f..238e077ca19 100644
--- a/pkg/workflow/compiler_safe_outputs_shared.go
+++ b/pkg/workflow/compiler_safe_outputs_shared.go
@@ -93,23 +93,3 @@ func (c *Compiler) buildHideCommentStepConfig(data *WorkflowData, mainJobName st
Token: cfg.GitHubToken,
}
}
-
-// buildUploadAssetsStepConfig builds the configuration for uploading assets
-func (c *Compiler) buildUploadAssetsStepConfig(data *WorkflowData, mainJobName string, threatDetectionEnabled bool) SafeOutputStepConfig {
- cfg := data.SafeOutputs.UploadAssets
-
- var customEnvVars []string
- customEnvVars = append(customEnvVars, c.buildStepLevelSafeOutputEnvVars(data, "")...)
-
- condition := BuildSafeOutputType("upload_asset")
-
- return SafeOutputStepConfig{
- StepName: "Upload Assets",
- StepID: "upload_assets",
- ScriptName: "upload_assets",
- Script: getUploadAssetsScript(),
- CustomEnvVars: customEnvVars,
- Condition: condition,
- Token: cfg.GitHubToken,
- }
-}
diff --git a/pkg/workflow/compiler_types.go b/pkg/workflow/compiler_types.go
index aa50fde12ba..c61327277a4 100644
--- a/pkg/workflow/compiler_types.go
+++ b/pkg/workflow/compiler_types.go
@@ -295,7 +295,7 @@ type SafeOutputsConfig struct {
UpdateIssues *UpdateIssuesConfig `yaml:"update-issues,omitempty"`
UpdatePullRequests *UpdatePullRequestsConfig `yaml:"update-pull-request,omitempty"` // Update GitHub pull request title/body
PushToPullRequestBranch *PushToPullRequestBranchConfig `yaml:"push-to-pull-request-branch,omitempty"`
- UploadAssets *UploadAssetsConfig `yaml:"upload-assets,omitempty"`
+ UploadAssets *UploadAssetsConfig `yaml:"upload-asset,omitempty"`
UpdateRelease *UpdateReleaseConfig `yaml:"update-release,omitempty"` // Update GitHub release descriptions
CreateAgentTasks *CreateAgentTaskConfig `yaml:"create-agent-task,omitempty"` // Create GitHub Copilot agent tasks
UpdateProjects *UpdateProjectConfig `yaml:"update-project,omitempty"` // Smart project board management (create/add/update)
diff --git a/pkg/workflow/imports.go b/pkg/workflow/imports.go
index a872b8383ab..531bb0a1ca5 100644
--- a/pkg/workflow/imports.go
+++ b/pkg/workflow/imports.go
@@ -456,7 +456,7 @@ func hasSafeOutputType(config *SafeOutputsConfig, key string) bool {
return config.UpdatePullRequests != nil
case "push-to-pull-request-branch":
return config.PushToPullRequestBranch != nil
- case "upload-assets":
+ case "upload-asset":
return config.UploadAssets != nil
case "update-release":
return config.UpdateRelease != nil
diff --git a/pkg/workflow/js/upload_assets.cjs b/pkg/workflow/js/upload_assets.cjs
index ee71a03ac38..325b12164c5 100644
--- a/pkg/workflow/js/upload_assets.cjs
+++ b/pkg/workflow/js/upload_assets.cjs
@@ -76,22 +76,17 @@ async function main() {
return;
}
- // Find all upload-assets items
- const uploadItems = result.items.filter(/** @param {any} item */ item => item.type === "upload_assets");
+ // Find all upload-asset items
+ const uploadItems = result.items.filter(/** @param {any} item */ item => item.type === "upload_asset");
- // Also check for legacy upload-asset items
- const uploadAssetItems = result.items.filter(/** @param {any} item */ item => item.type === "upload_asset");
-
- const allUploadItems = [...uploadItems, ...uploadAssetItems];
-
- if (allUploadItems.length === 0) {
+ if (uploadItems.length === 0) {
core.info("No upload-asset items found in agent output");
core.setOutput("upload_count", "0");
core.setOutput("branch_name", normalizedBranchName);
return;
}
- core.info(`Found ${allUploadItems.length} upload-asset item(s)`);
+ core.info(`Found ${uploadItems.length} upload-asset item(s)`);
let uploadCount = 0;
let hasChanges = false;
@@ -121,7 +116,7 @@ async function main() {
}
// Process each asset
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
try {
const { fileName, sha, size, targetFileName } = asset;
@@ -179,7 +174,7 @@ async function main() {
core.info(`Successfully uploaded ${uploadCount} assets to branch ${normalizedBranchName}`);
}
- for (const asset of allUploadItems) {
+ for (const asset of uploadItems) {
if (asset.fileName && asset.sha && asset.size && asset.url) {
core.summary.addRaw(`- [\`${asset.fileName}\`](${asset.url}) → \`${asset.targetFileName}\` (${asset.size} bytes)`);
}
diff --git a/pkg/workflow/publish_assets.go b/pkg/workflow/publish_assets.go
index 51866335a1c..da0a13e53d4 100644
--- a/pkg/workflow/publish_assets.go
+++ b/pkg/workflow/publish_assets.go
@@ -4,6 +4,7 @@ import (
"fmt"
"strings"
+ "github.com/githubnext/gh-aw/pkg/constants"
"github.com/githubnext/gh-aw/pkg/logger"
)
@@ -19,8 +20,8 @@ type UploadAssetsConfig struct {
// parseUploadAssetConfig handles upload-asset configuration
func (c *Compiler) parseUploadAssetConfig(outputMap map[string]any) *UploadAssetsConfig {
- if configData, exists := outputMap["upload-assets"]; exists {
- publishAssetsLog.Print("Parsing upload-assets configuration")
+ if configData, exists := outputMap["upload-asset"]; exists {
+ publishAssetsLog.Print("Parsing upload-asset configuration")
config := &UploadAssetsConfig{
BranchName: "assets/${{ github.workflow }}", // Default branch name
MaxSizeKB: 10240, // Default 10MB
@@ -64,10 +65,10 @@ func (c *Compiler) parseUploadAssetConfig(outputMap map[string]any) *UploadAsset
// Parse common base fields with default max of 0 (no limit)
c.parseBaseSafeOutputConfig(configMap, &config.BaseSafeOutputConfig, 0)
- publishAssetsLog.Printf("Parsed upload-assets config: branch=%s, max_size_kb=%d, allowed_exts=%d", config.BranchName, config.MaxSizeKB, len(config.AllowedExts))
+ publishAssetsLog.Printf("Parsed upload-asset config: branch=%s, max_size_kb=%d, allowed_exts=%d", config.BranchName, config.MaxSizeKB, len(config.AllowedExts))
} else if configData == nil {
// Handle null case: create config with defaults
- publishAssetsLog.Print("Using default upload-assets configuration")
+ publishAssetsLog.Print("Using default upload-asset configuration")
return config
}
@@ -78,8 +79,8 @@ func (c *Compiler) parseUploadAssetConfig(outputMap map[string]any) *UploadAsset
}
// buildUploadAssetsJob creates the publish_assets job
-func (c *Compiler) buildUploadAssetsJob(data *WorkflowData, mainJobName string) (*Job, error) {
- publishAssetsLog.Printf("Building upload_assets job: workflow=%s, main_job=%s", data.Name, mainJobName)
+func (c *Compiler) buildUploadAssetsJob(data *WorkflowData, mainJobName string, threatDetectionEnabled bool) (*Job, error) {
+ publishAssetsLog.Printf("Building upload_assets job: workflow=%s, main_job=%s, threat_detection=%v", data.Name, mainJobName, threatDetectionEnabled)
if data.SafeOutputs == nil || data.SafeOutputs.UploadAssets == nil {
return nil, fmt.Errorf("safe-outputs.upload-asset configuration is required")
@@ -129,6 +130,13 @@ func (c *Compiler) buildUploadAssetsJob(data *WorkflowData, mainJobName string)
// Build the job condition using expression tree
jobCondition := BuildSafeOutputType("upload_asset")
+ // Build job dependencies
+ needs := []string{mainJobName}
+ if threatDetectionEnabled {
+ needs = append(needs, constants.DetectionJobName)
+ publishAssetsLog.Printf("Added detection job dependency for upload_assets")
+ }
+
// Use the shared builder function to create the job
return c.buildSafeOutputJob(data, SafeOutputJobConfig{
JobName: "upload_assets",
@@ -142,5 +150,6 @@ func (c *Compiler) buildUploadAssetsJob(data *WorkflowData, mainJobName string)
Condition: jobCondition,
PreSteps: preSteps,
Token: data.SafeOutputs.UploadAssets.GitHubToken,
+ Needs: needs,
})
}
diff --git a/pkg/workflow/publish_assets_test.go b/pkg/workflow/publish_assets_test.go
index 880cb76430c..88b6a7e6c3b 100644
--- a/pkg/workflow/publish_assets_test.go
+++ b/pkg/workflow/publish_assets_test.go
@@ -16,7 +16,7 @@ func TestParseUploadAssetConfig(t *testing.T) {
{
name: "upload-asset config with custom values",
input: map[string]any{
- "upload-assets": map[string]any{
+ "upload-asset": map[string]any{
"branch": "my-assets/${{ github.event.repository.name }}",
"max-size": 5120,
"allowed-exts": []any{".jpg", ".png", ".txt"},
@@ -33,7 +33,7 @@ func TestParseUploadAssetConfig(t *testing.T) {
{
name: "upload-asset config with max",
input: map[string]any{
- "upload-assets": map[string]any{
+ "upload-asset": map[string]any{
"max": 5,
},
},
@@ -126,7 +126,7 @@ func TestUploadAssetsJobUsesFileInput(t *testing.T) {
},
}
- job, err := c.buildUploadAssetsJob(data, "agent")
+ job, err := c.buildUploadAssetsJob(data, "agent", false)
if err != nil {
t.Fatalf("Failed to build upload assets job: %v", err)
}
diff --git a/pkg/workflow/safe_outputs_config.go b/pkg/workflow/safe_outputs_config.go
index 3e05640523d..65561602280 100644
--- a/pkg/workflow/safe_outputs_config.go
+++ b/pkg/workflow/safe_outputs_config.go
@@ -130,7 +130,7 @@ func GetEnabledSafeOutputToolNames(safeOutputs *SafeOutputsConfig) []string {
tools = append(tools, "push_to_pull_request_branch")
}
if safeOutputs.UploadAssets != nil {
- tools = append(tools, "upload_assets")
+ tools = append(tools, "upload_asset")
}
if safeOutputs.UpdateRelease != nil {
tools = append(tools, "update_release")
diff --git a/pkg/workflow/safe_outputs_integration_test.go b/pkg/workflow/safe_outputs_integration_test.go
index b5e41d71369..2fb57d3dafd 100644
--- a/pkg/workflow/safe_outputs_integration_test.go
+++ b/pkg/workflow/safe_outputs_integration_test.go
@@ -181,7 +181,7 @@ func TestSafeOutputJobsIntegration(t *testing.T) {
},
requiredEnvVar: "GH_AW_WORKFLOW_ID",
jobBuilder: func(c *Compiler, data *WorkflowData, mainJobName string) (*Job, error) {
- return c.buildUploadAssetsJob(data, mainJobName)
+ return c.buildUploadAssetsJob(data, mainJobName, false)
},
},
{
diff --git a/pkg/workflow/upload_assets_config_test.go b/pkg/workflow/upload_assets_config_test.go
index 19190b02de6..ca4bd007c58 100644
--- a/pkg/workflow/upload_assets_config_test.go
+++ b/pkg/workflow/upload_assets_config_test.go
@@ -9,7 +9,7 @@ func TestUploadAssetsConfigDefaults(t *testing.T) {
// Test default configuration
outputMap := map[string]any{
- "upload-assets": nil,
+ "upload-asset": nil,
}
config := compiler.parseUploadAssetConfig(outputMap)
@@ -40,7 +40,7 @@ func TestUploadAssetsConfigCustomExtensions(t *testing.T) {
// Test custom configuration like dev.md
outputMap := map[string]any{
- "upload-assets": map[string]any{
+ "upload-asset": map[string]any{
"allowed-exts": []any{".txt"},
"max-size": 1024,
},
diff --git a/specs/safe-output-environment-variables.md b/specs/safe-output-environment-variables.md
index ebe6c69d8ef..6e7b585246a 100644
--- a/specs/safe-output-environment-variables.md
+++ b/specs/safe-output-environment-variables.md
@@ -140,7 +140,7 @@ Each safe output type has additional environment variables specific to its confi
| `GH_AW_COMMENT_ID` | Comment ID from activation job | Command-triggered workflow | From activation job output |
| `GH_AW_COMMENT_REPO` | Repository containing comment | Command-triggered workflow | From activation job output |
-### Upload Assets (`upload-assets:`)
+### Upload Assets (`upload-asset:`)
| Variable | Description | Set When | Example |
|----------|-------------|----------|---------|