From 5c41afb1a111309b7becfd208b541fdcf6a03aa4 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Sun, 12 Apr 2026 13:54:49 +0000
Subject: [PATCH 1/7] feat: add cleanup-cache-memory job to agentics
maintenance workflow
Add a new scheduled job that cleans outdated cache-memory caches:
- Lists all caches with "memory-" prefix via GitHub Actions API
- Groups caches by key prefix (everything before the run ID)
- Sorts by embedded run ID (last numeric segment)
- Keeps the latest entry per group, deletes the rest
- Includes timeouts between API calls to avoid throttling
- Skips if GitHub API rate limit is too low (< 100 remaining)
- Checks rate limit periodically during deletion and stops early if needed
Files added:
- actions/setup/js/cleanup_cache_memory.cjs - cleanup script
- actions/setup/js/cleanup_cache_memory.test.cjs - unit tests (20 tests)
Files modified:
- pkg/workflow/maintenance_workflow.go - add cleanup-cache-memory job
- pkg/workflow/maintenance_workflow_test.go - include new job in condition checks
- .github/workflows/agentics-maintenance.yml - regenerated
Agent-Logs-Url: https://github.com/github/gh-aw/sessions/5d9226f6-d004-4ef1-b72a-d5ba94d545d5
Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com>
---
.github/workflows/agentics-maintenance.yml | 27 ++
actions/setup/js/cleanup_cache_memory.cjs | 335 +++++++++++++++++
.../setup/js/cleanup_cache_memory.test.cjs | 346 ++++++++++++++++++
pkg/workflow/maintenance_workflow.go | 37 ++
pkg/workflow/maintenance_workflow_test.go | 2 +-
5 files changed, 746 insertions(+), 1 deletion(-)
create mode 100644 actions/setup/js/cleanup_cache_memory.cjs
create mode 100644 actions/setup/js/cleanup_cache_memory.test.cjs
diff --git a/.github/workflows/agentics-maintenance.yml b/.github/workflows/agentics-maintenance.yml
index 378b52c38fa..ac0b35306c1 100644
--- a/.github/workflows/agentics-maintenance.yml
+++ b/.github/workflows/agentics-maintenance.yml
@@ -107,6 +107,33 @@ jobs:
const { main } = require('${{ runner.temp }}/gh-aw/actions/close_expired_pull_requests.cjs');
await main();
+ cleanup-cache-memory:
+ if: ${{ !github.event.repository.fork && (github.event_name != 'workflow_dispatch' || github.event.inputs.operation == '') }}
+ runs-on: ubuntu-slim
+ permissions:
+ actions: write
+ steps:
+ - name: Checkout actions folder
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+ with:
+ sparse-checkout: |
+ actions
+ persist-credentials: false
+
+ - name: Setup Scripts
+ uses: ./actions/setup
+ with:
+ destination: ${{ runner.temp }}/gh-aw/actions
+
+ - name: Cleanup outdated cache-memory entries
+ uses: actions/github-script@373c709c69115d41ff229c7e5df9f8788daa9553 # v9
+ with:
+ script: |
+ const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
+ setupGlobals(core, github, context, exec, io, getOctokit);
+ const { main } = require('${{ runner.temp }}/gh-aw/actions/cleanup_cache_memory.cjs');
+ await main();
+
run_operation:
if: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.operation != '' && github.event.inputs.operation != 'safe_outputs' && github.event.inputs.operation != 'create_labels' && github.event.inputs.operation != 'validate' && !github.event.repository.fork }}
runs-on: ubuntu-slim
diff --git a/actions/setup/js/cleanup_cache_memory.cjs b/actions/setup/js/cleanup_cache_memory.cjs
new file mode 100644
index 00000000000..9ff6c686d80
--- /dev/null
+++ b/actions/setup/js/cleanup_cache_memory.cjs
@@ -0,0 +1,335 @@
+// @ts-check
+///
+
+const { getErrorMessage } = require("./error_helpers.cjs");
+
+/**
+ * Delay execution for a given number of milliseconds.
+ * Used to avoid GitHub API throttling between requests.
+ * @param {number} ms - Milliseconds to wait
+ * @returns {Promise<void>}
+ */
+function delay(ms) {
+ return new Promise(resolve => setTimeout(resolve, ms));
+}
+
+/**
+ * Check the current rate limit and determine if we should continue.
+ * Returns the remaining requests count, or -1 if we couldn't check.
+ * @param {any} github - GitHub REST client
+ * @returns {Promise<number>} Remaining requests, or -1 on error
+ */
+async function getRateLimitRemaining(github) {
+ try {
+ const { data } = await github.rest.rateLimit.get();
+ return data.rate.remaining;
+ } catch {
+ return -1;
+ }
+}
+
+/**
+ * Minimum rate limit remaining before we skip further operations.
+ * This reserves capacity for other workflow jobs and API consumers.
+ */
+const MIN_RATE_LIMIT_REMAINING = 100;
+
+/**
+ * Default delay in ms between delete operations to avoid throttling.
+ */
+const DELETE_DELAY_MS = 250;
+
+/**
+ * Default delay in ms between list pages to avoid throttling.
+ */
+const LIST_DELAY_MS = 100;
+
+/**
+ * Extract the run ID from a cache key.
+ * Cache keys follow the pattern: memory-{parts}-{runID}
+ * where runID is the last numeric segment.
+ *
+ * @param {string} key - Cache key string
+ * @returns {number | null} The extracted run ID, or null if not found
+ */
+function extractRunId(key) {
+ const parts = key.split("-");
+ // Walk backwards to find the last purely numeric segment
+ for (let i = parts.length - 1; i >= 0; i--) {
+ if (/^\d+$/.test(parts[i])) {
+ return parseInt(parts[i], 10);
+ }
+ }
+ return null;
+}
+
+/**
+ * Derive the group key from a cache key by removing the run ID suffix.
+ * This groups caches that differ only by their run ID.
+ *
+ * @param {string} key - Cache key string
+ * @returns {string} The group key (everything before the run ID)
+ */
+function deriveGroupKey(key) {
+ const parts = key.split("-");
+ // Walk backwards to find the last purely numeric segment and strip it
+ for (let i = parts.length - 1; i >= 0; i--) {
+ if (/^\d+$/.test(parts[i])) {
+ return parts.slice(0, i).join("-");
+ }
+ }
+ // If no numeric segment found, return the full key
+ return key;
+}
+
+/**
+ * @typedef {Object} CacheEntry
+ * @property {number} id - Cache ID for deletion
+ * @property {string} key - Full cache key
+ * @property {number | null} runId - Extracted run ID
+ * @property {string} groupKey - Group key (key without run ID)
+ */
+
+/**
+ * List all caches starting with "memory-" prefix, handling pagination.
+ *
+ * @param {any} github - GitHub REST client
+ * @param {string} owner - Repository owner
+ * @param {string} repo - Repository name
+ * @param {number} [listDelayMs] - Delay between list pages in ms
+ * @returns {Promise<CacheEntry[]>} List of cache entries
+ */
+async function listMemoryCaches(github, owner, repo, listDelayMs = LIST_DELAY_MS) {
+ /** @type {CacheEntry[]} */
+ const caches = [];
+ let page = 1;
+ const perPage = 100;
+
+ while (true) {
+ const response = await github.rest.actions.getActionsCacheList({
+ owner,
+ repo,
+ key: "memory-",
+ per_page: perPage,
+ page,
+ sort: "key",
+ direction: "asc",
+ });
+
+ const actionsCaches = response.data.actions_caches;
+ if (!actionsCaches || actionsCaches.length === 0) {
+ break;
+ }
+
+ for (const cache of actionsCaches) {
+ if (!cache.key || !cache.key.startsWith("memory-")) {
+ continue;
+ }
+ caches.push({
+ id: cache.id,
+ key: cache.key,
+ runId: extractRunId(cache.key),
+ groupKey: deriveGroupKey(cache.key),
+ });
+ }
+
+ if (actionsCaches.length < perPage) {
+ break;
+ }
+
+ page++;
+ // Throttle between list pages
+ await delay(listDelayMs);
+ }
+
+ return caches;
+}
+
+/**
+ * Group caches by their group key (everything except run ID),
+ * then for each group keep only the entry with the highest run ID
+ * and return the rest for deletion.
+ *
+ * @param {CacheEntry[]} caches - List of cache entries
+ * @returns {{ toDelete: CacheEntry[], kept: CacheEntry[] }}
+ */
+function identifyCachesToDelete(caches) {
+ /** @type {Map<string, CacheEntry[]>} */
+ const groups = new Map();
+
+ for (const cache of caches) {
+ if (cache.runId === null) {
+ // Skip caches without a recognizable run ID
+ continue;
+ }
+ const group = groups.get(cache.groupKey) || [];
+ group.push(cache);
+ groups.set(cache.groupKey, group);
+ }
+
+ /** @type {CacheEntry[]} */
+ const toDelete = [];
+ /** @type {CacheEntry[]} */
+ const kept = [];
+
+ for (const [, group] of groups) {
+ if (group.length <= 1) {
+ // Only one entry in this group, nothing to clean up
+ if (group.length === 1) {
+ kept.push(group[0]);
+ }
+ continue;
+ }
+
+ // Sort by run ID descending (highest first = latest)
+ group.sort((a, b) => (b.runId ?? 0) - (a.runId ?? 0));
+
+ // Keep the first (latest), mark the rest for deletion
+ kept.push(group[0]);
+ toDelete.push(...group.slice(1));
+ }
+
+ return { toDelete, kept };
+}
+
+/**
+ * Main entry point: cleanup outdated cache-memory caches.
+ *
+ * Lists all caches with "memory-" prefix, groups them by key prefix,
+ * keeps the latest run ID per group, and deletes the rest.
+ * Includes timeouts to avoid GitHub API throttling and skips
+ * entirely if the remaining API rate limit is too low.
+ *
+ * @param {Object} [options] - Optional configuration for testing
+ * @param {number} [options.deleteDelayMs] - Delay between deletions (default: DELETE_DELAY_MS)
+ * @param {number} [options.listDelayMs] - Delay between list pages (default: LIST_DELAY_MS)
+ */
+async function main(options = {}) {
+ const deleteDelayMs = options.deleteDelayMs ?? DELETE_DELAY_MS;
+ const listDelayMs = options.listDelayMs ?? LIST_DELAY_MS;
+
+ const owner = context.repo.owner;
+ const repo = context.repo.repo;
+
+ core.info("🧹 Starting cache-memory cleanup");
+
+ // Check rate limit before starting
+ const initialRemaining = await getRateLimitRemaining(github);
+ if (initialRemaining !== -1 && initialRemaining < MIN_RATE_LIMIT_REMAINING) {
+ core.warning(`⚠️ Rate limit too low (${initialRemaining} remaining, minimum: ${MIN_RATE_LIMIT_REMAINING}). Skipping cache cleanup.`);
+ core.summary.addRaw(`## Cache Memory Cleanup\n\n⚠️ Skipped: Rate limit too low (${initialRemaining} remaining, minimum required: ${MIN_RATE_LIMIT_REMAINING})\n`);
+ await core.summary.write();
+ return;
+ }
+
+ core.info(` Rate limit remaining: ${initialRemaining === -1 ? "unknown" : initialRemaining}`);
+
+ // List all memory caches
+ core.info("🔍 Listing caches with 'memory-' prefix...");
+ let caches;
+ try {
+ caches = await listMemoryCaches(github, owner, repo, listDelayMs);
+ } catch (error) {
+ core.error(`❌ Failed to list caches: ${getErrorMessage(error)}`);
+ core.summary.addRaw(`## Cache Memory Cleanup\n\n❌ Failed to list caches: ${getErrorMessage(error)}\n`);
+ await core.summary.write();
+ return;
+ }
+
+ core.info(` Found ${caches.length} cache(s) with 'memory-' prefix`);
+
+ if (caches.length === 0) {
+ core.info("✅ No memory caches found. Nothing to clean up.");
+ core.summary.addRaw("## Cache Memory Cleanup\n\n✅ No memory caches found. Nothing to clean up.\n");
+ await core.summary.write();
+ return;
+ }
+
+ // Identify which caches to delete
+ const { toDelete, kept } = identifyCachesToDelete(caches);
+
+ core.info(` Groups with latest entries kept: ${kept.length}`);
+ core.info(` Outdated entries to delete: ${toDelete.length}`);
+
+ if (toDelete.length === 0) {
+ core.info("✅ No outdated caches to clean up. All entries are current.");
+ core.summary.addRaw(`## Cache Memory Cleanup\n\n✅ No outdated caches to clean up.\n- Total memory caches: ${caches.length}\n- Groups: ${kept.length}\n`);
+ await core.summary.write();
+ return;
+ }
+
+ // Delete outdated caches with throttling
+ core.info(`🗑️ Deleting ${toDelete.length} outdated cache(s)...`);
+ let deletedCount = 0;
+ let failedCount = 0;
+ /** @type {string[]} */
+ const errors = [];
+
+ for (const cache of toDelete) {
+ // Check rate limit periodically (every 10 deletions)
+ if (deletedCount > 0 && deletedCount % 10 === 0) {
+ const remaining = await getRateLimitRemaining(github);
+ if (remaining !== -1 && remaining < MIN_RATE_LIMIT_REMAINING) {
+ core.warning(`⚠️ Rate limit getting low (${remaining} remaining). Stopping deletion early.`);
+ core.warning(` Deleted ${deletedCount} of ${toDelete.length} caches before stopping.`);
+ break;
+ }
+ }
+
+ try {
+ await github.rest.actions.deleteActionsCacheById({
+ owner,
+ repo,
+ cache_id: cache.id,
+ });
+ deletedCount++;
+ core.info(` ✓ Deleted cache: ${cache.key} (run ID: ${cache.runId})`);
+ } catch (error) {
+ failedCount++;
+ const msg = `Failed to delete cache ${cache.key}: ${getErrorMessage(error)}`;
+ errors.push(msg);
+ core.warning(` ✗ ${msg}`);
+ }
+
+ // Throttle between deletions
+ await delay(deleteDelayMs);
+ }
+
+ // Summary
+ core.info(`\n📊 Cache cleanup complete:`);
+ core.info(` Total memory caches found: ${caches.length}`);
+ core.info(` Groups (latest kept): ${kept.length}`);
+ core.info(` Outdated deleted: ${deletedCount}`);
+ if (failedCount > 0) {
+ core.info(` Failed to delete: ${failedCount}`);
+ }
+
+ // Write job summary
+ let summary = `## Cache Memory Cleanup\n\n`;
+ summary += `| Metric | Count |\n|--------|-------|\n`;
+ summary += `| Total memory caches | ${caches.length} |\n`;
+ summary += `| Groups (latest kept) | ${kept.length} |\n`;
+ summary += `| Outdated deleted | ${deletedCount} |\n`;
+ if (failedCount > 0) {
+ summary += `| Failed to delete | ${failedCount} |\n`;
+ }
+ if (errors.length > 0) {
+ summary += `\n### Errors\n\n`;
+ for (const err of errors) {
+ summary += `- ${err}\n`;
+ }
+ }
+ core.summary.addRaw(summary);
+ await core.summary.write();
+
+ core.info("✅ Cache memory cleanup finished");
+}
+
+module.exports = {
+ main,
+ extractRunId,
+ deriveGroupKey,
+ identifyCachesToDelete,
+ listMemoryCaches,
+ MIN_RATE_LIMIT_REMAINING,
+};
diff --git a/actions/setup/js/cleanup_cache_memory.test.cjs b/actions/setup/js/cleanup_cache_memory.test.cjs
new file mode 100644
index 00000000000..46b195a0510
--- /dev/null
+++ b/actions/setup/js/cleanup_cache_memory.test.cjs
@@ -0,0 +1,346 @@
+// @ts-check
+import { describe, it, expect, beforeEach, vi } from "vitest";
+
+// Mock core and context globals
+const mockCore = {
+ info: vi.fn(),
+ warning: vi.fn(),
+ error: vi.fn(),
+ summary: {
+ addRaw: vi.fn().mockReturnThis(),
+ write: vi.fn(),
+ },
+ setOutput: vi.fn(),
+};
+
+const mockContext = {
+ repo: {
+ owner: "testowner",
+ repo: "testrepo",
+ },
+};
+
+global.core = mockCore;
+global.context = mockContext;
+
+describe("cleanup_cache_memory", () => {
+ let mockGithub;
+
+ beforeEach(() => {
+ vi.clearAllMocks();
+ mockGithub = {
+ rest: {
+ actions: {
+ getActionsCacheList: vi.fn(),
+ deleteActionsCacheById: vi.fn(),
+ },
+ rateLimit: {
+ get: vi.fn().mockResolvedValue({
+ data: { rate: { remaining: 5000 } },
+ }),
+ },
+ },
+ };
+ global.github = mockGithub;
+ });
+
+ describe("extractRunId", () => {
+ it("should extract run ID from standard cache key", async () => {
+ const { extractRunId } = await import("./cleanup_cache_memory.cjs");
+ expect(extractRunId("memory-none-nopolicy-workflow-12345")).toBe(12345);
+ });
+
+ it("should extract run ID from integrity-aware cache key", async () => {
+ const { extractRunId } = await import("./cleanup_cache_memory.cjs");
+ expect(extractRunId("memory-unapproved-7e4d9f12-session-workflow-67890")).toBe(67890);
+ });
+
+ it("should return null when no numeric segment exists", async () => {
+ const { extractRunId } = await import("./cleanup_cache_memory.cjs");
+ expect(extractRunId("memory-none-nopolicy-workflow")).toBeNull();
+ });
+
+ it("should handle cache key with only run ID as numeric part", async () => {
+ const { extractRunId } = await import("./cleanup_cache_memory.cjs");
+ expect(extractRunId("memory-abc-def-99999")).toBe(99999);
+ });
+ });
+
+ describe("deriveGroupKey", () => {
+ it("should derive group key by removing run ID suffix", async () => {
+ const { deriveGroupKey } = await import("./cleanup_cache_memory.cjs");
+ expect(deriveGroupKey("memory-none-nopolicy-workflow-12345")).toBe("memory-none-nopolicy-workflow");
+ });
+
+ it("should handle integrity-aware keys", async () => {
+ const { deriveGroupKey } = await import("./cleanup_cache_memory.cjs");
+ expect(deriveGroupKey("memory-unapproved-7e4d9f12-session-67890")).toBe("memory-unapproved-7e4d9f12-session");
+ });
+
+ it("should return full key when no numeric segment found", async () => {
+ const { deriveGroupKey } = await import("./cleanup_cache_memory.cjs");
+ expect(deriveGroupKey("memory-none-nopolicy-workflow")).toBe("memory-none-nopolicy-workflow");
+ });
+ });
+
+ describe("identifyCachesToDelete", () => {
+ it("should keep latest run ID and mark older ones for deletion", async () => {
+ const { identifyCachesToDelete } = await import("./cleanup_cache_memory.cjs");
+
+ const caches = [
+ { id: 1, key: "memory-none-nopolicy-workflow-100", runId: 100, groupKey: "memory-none-nopolicy-workflow" },
+ { id: 2, key: "memory-none-nopolicy-workflow-200", runId: 200, groupKey: "memory-none-nopolicy-workflow" },
+ { id: 3, key: "memory-none-nopolicy-workflow-150", runId: 150, groupKey: "memory-none-nopolicy-workflow" },
+ ];
+
+ const { toDelete, kept } = identifyCachesToDelete(caches);
+
+ expect(kept).toHaveLength(1);
+ expect(kept[0].runId).toBe(200);
+ expect(toDelete).toHaveLength(2);
+ expect(toDelete.map(c => c.runId).sort((a, b) => a - b)).toEqual([100, 150]);
+ });
+
+ it("should handle multiple groups independently", async () => {
+ const { identifyCachesToDelete } = await import("./cleanup_cache_memory.cjs");
+
+ const caches = [
+ { id: 1, key: "memory-none-nopolicy-wf1-100", runId: 100, groupKey: "memory-none-nopolicy-wf1" },
+ { id: 2, key: "memory-none-nopolicy-wf1-200", runId: 200, groupKey: "memory-none-nopolicy-wf1" },
+ { id: 3, key: "memory-none-nopolicy-wf2-50", runId: 50, groupKey: "memory-none-nopolicy-wf2" },
+ { id: 4, key: "memory-none-nopolicy-wf2-75", runId: 75, groupKey: "memory-none-nopolicy-wf2" },
+ ];
+
+ const { toDelete, kept } = identifyCachesToDelete(caches);
+
+ expect(kept).toHaveLength(2);
+ expect(kept.map(c => c.runId).sort((a, b) => a - b)).toEqual([75, 200]);
+ expect(toDelete).toHaveLength(2);
+ expect(toDelete.map(c => c.runId).sort((a, b) => a - b)).toEqual([50, 100]);
+ });
+
+ it("should not delete when only one entry per group", async () => {
+ const { identifyCachesToDelete } = await import("./cleanup_cache_memory.cjs");
+
+ const caches = [
+ { id: 1, key: "memory-none-nopolicy-wf1-100", runId: 100, groupKey: "memory-none-nopolicy-wf1" },
+ { id: 2, key: "memory-none-nopolicy-wf2-200", runId: 200, groupKey: "memory-none-nopolicy-wf2" },
+ ];
+
+ const { toDelete, kept } = identifyCachesToDelete(caches);
+
+ expect(kept).toHaveLength(2);
+ expect(toDelete).toHaveLength(0);
+ });
+
+ it("should skip entries with null run ID", async () => {
+ const { identifyCachesToDelete } = await import("./cleanup_cache_memory.cjs");
+
+ const caches = [
+ { id: 1, key: "memory-none-nopolicy-workflow", runId: null, groupKey: "memory-none-nopolicy-workflow" },
+ { id: 2, key: "memory-none-nopolicy-wf-100", runId: 100, groupKey: "memory-none-nopolicy-wf" },
+ ];
+
+ const { toDelete, kept } = identifyCachesToDelete(caches);
+
+ expect(kept).toHaveLength(1);
+ expect(kept[0].runId).toBe(100);
+ expect(toDelete).toHaveLength(0);
+ });
+
+ it("should handle empty input", async () => {
+ const { identifyCachesToDelete } = await import("./cleanup_cache_memory.cjs");
+
+ const { toDelete, kept } = identifyCachesToDelete([]);
+
+ expect(kept).toHaveLength(0);
+ expect(toDelete).toHaveLength(0);
+ });
+ });
+
+ describe("main - no caches found", () => {
+ it("should handle case when no memory caches exist", async () => {
+ const module = await import("./cleanup_cache_memory.cjs");
+
+ mockGithub.rest.actions.getActionsCacheList.mockResolvedValueOnce({
+ data: {
+ total_count: 0,
+ actions_caches: [],
+ },
+ });
+
+ await module.main({ deleteDelayMs: 0, listDelayMs: 0 });
+
+ expect(mockCore.info).toHaveBeenCalledWith(expect.stringContaining("No memory caches found"));
+ expect(mockCore.summary.addRaw).toHaveBeenCalledWith(expect.stringContaining("No memory caches found"));
+ expect(mockCore.summary.write).toHaveBeenCalled();
+ });
+ });
+
+ describe("main - rate limit too low", () => {
+ it("should skip cleanup when rate limit is below threshold", async () => {
+ const module = await import("./cleanup_cache_memory.cjs");
+
+ mockGithub.rest.rateLimit.get.mockResolvedValueOnce({
+ data: { rate: { remaining: 50 } },
+ });
+
+ await module.main({ deleteDelayMs: 0, listDelayMs: 0 });
+
+ expect(mockCore.warning).toHaveBeenCalledWith(expect.stringContaining("Rate limit too low"));
+ expect(mockGithub.rest.actions.getActionsCacheList).not.toHaveBeenCalled();
+ expect(mockCore.summary.addRaw).toHaveBeenCalledWith(expect.stringContaining("Skipped: Rate limit too low"));
+ expect(mockCore.summary.write).toHaveBeenCalled();
+ });
+ });
+
+ describe("main - deletes outdated caches", () => {
+ it("should delete older caches and keep latest per group", async () => {
+ const module = await import("./cleanup_cache_memory.cjs");
+
+ mockGithub.rest.actions.getActionsCacheList.mockResolvedValueOnce({
+ data: {
+ total_count: 3,
+ actions_caches: [
+ { id: 1, key: "memory-none-nopolicy-workflow-100" },
+ { id: 2, key: "memory-none-nopolicy-workflow-200" },
+ { id: 3, key: "memory-none-nopolicy-workflow-150" },
+ ],
+ },
+ });
+
+ mockGithub.rest.actions.deleteActionsCacheById.mockResolvedValue({});
+
+ await module.main({ deleteDelayMs: 0, listDelayMs: 0 });
+
+ // Should have deleted 2 caches (run IDs 100 and 150)
+ expect(mockGithub.rest.actions.deleteActionsCacheById).toHaveBeenCalledTimes(2);
+ expect(mockGithub.rest.actions.deleteActionsCacheById).toHaveBeenCalledWith(expect.objectContaining({ owner: "testowner", repo: "testrepo" }));
+
+ expect(mockCore.info).toHaveBeenCalledWith(expect.stringContaining("Cache memory cleanup finished"));
+ expect(mockCore.summary.write).toHaveBeenCalled();
+ });
+ });
+
+ describe("main - all caches are current", () => {
+ it("should not delete when each group has only one entry", async () => {
+ const module = await import("./cleanup_cache_memory.cjs");
+
+ mockGithub.rest.actions.getActionsCacheList.mockResolvedValueOnce({
+ data: {
+ total_count: 2,
+ actions_caches: [
+ { id: 1, key: "memory-none-nopolicy-wf1-100" },
+ { id: 2, key: "memory-none-nopolicy-wf2-200" },
+ ],
+ },
+ });
+
+ await module.main({ deleteDelayMs: 0, listDelayMs: 0 });
+
+ expect(mockGithub.rest.actions.deleteActionsCacheById).not.toHaveBeenCalled();
+ expect(mockCore.info).toHaveBeenCalledWith(expect.stringContaining("No outdated caches to clean up"));
+ });
+ });
+
+ describe("main - handles delete errors gracefully", () => {
+ it("should continue deleting after individual failures", async () => {
+ const module = await import("./cleanup_cache_memory.cjs");
+
+ mockGithub.rest.actions.getActionsCacheList.mockResolvedValueOnce({
+ data: {
+ total_count: 3,
+ actions_caches: [
+ { id: 1, key: "memory-none-nopolicy-wf-100" },
+ { id: 2, key: "memory-none-nopolicy-wf-200" },
+ { id: 3, key: "memory-none-nopolicy-wf-300" },
+ ],
+ },
+ });
+
+ // First delete fails, second succeeds
+ mockGithub.rest.actions.deleteActionsCacheById.mockRejectedValueOnce(new Error("API error")).mockResolvedValueOnce({});
+
+ await module.main({ deleteDelayMs: 0, listDelayMs: 0 });
+
+ // Should have attempted to delete 2 caches (IDs 100 and 200, keeping 300)
+ expect(mockGithub.rest.actions.deleteActionsCacheById).toHaveBeenCalledTimes(2);
+ expect(mockCore.warning).toHaveBeenCalledWith(expect.stringContaining("Failed to delete cache"));
+ expect(mockCore.summary.addRaw).toHaveBeenCalledWith(expect.stringContaining("Errors"));
+ });
+ });
+
+ describe("main - handles list error", () => {
+ it("should handle errors when listing caches", async () => {
+ const module = await import("./cleanup_cache_memory.cjs");
+
+ mockGithub.rest.actions.getActionsCacheList.mockRejectedValueOnce(new Error("API error"));
+
+ await module.main({ deleteDelayMs: 0, listDelayMs: 0 });
+
+ expect(mockCore.error).toHaveBeenCalledWith(expect.stringContaining("Failed to list caches"));
+ expect(mockCore.summary.write).toHaveBeenCalled();
+ });
+ });
+
+ describe("main - pagination", () => {
+ it("should handle paginated cache list results", async () => {
+ const module = await import("./cleanup_cache_memory.cjs");
+
+ // First page - full page of 100 items (simulated with 2 for testing)
+ const page1Caches = [];
+ for (let i = 0; i < 100; i++) {
+ page1Caches.push({ id: i + 1, key: `memory-none-nopolicy-wf-${1000 + i}` });
+ }
+
+ // Second page - partial page
+ const page2Caches = [{ id: 101, key: "memory-none-nopolicy-wf-2000" }];
+
+ mockGithub.rest.actions.getActionsCacheList
+ .mockResolvedValueOnce({
+ data: { total_count: 101, actions_caches: page1Caches },
+ })
+ .mockResolvedValueOnce({
+ data: { total_count: 101, actions_caches: page2Caches },
+ });
+
+ mockGithub.rest.actions.deleteActionsCacheById.mockResolvedValue({});
+
+ await module.main({ deleteDelayMs: 0, listDelayMs: 0 });
+
+ // Should have fetched 2 pages
+ expect(mockGithub.rest.actions.getActionsCacheList).toHaveBeenCalledTimes(2);
+
+ // Should delete 100 caches (keep only run 2000, delete 1000-1099)
+ expect(mockGithub.rest.actions.deleteActionsCacheById).toHaveBeenCalledTimes(100);
+ });
+ });
+
+ describe("main - rate limit stops early", () => {
+ it("should stop deleting when rate limit drops below threshold", async () => {
+ const module = await import("./cleanup_cache_memory.cjs");
+
+ // Create 15 caches in same group (need >10 to trigger the rate limit check)
+ const caches = [];
+ for (let i = 0; i < 15; i++) {
+ caches.push({ id: i + 1, key: `memory-none-nopolicy-wf-${100 + i}` });
+ }
+
+ mockGithub.rest.actions.getActionsCacheList.mockResolvedValueOnce({
+ data: { total_count: 15, actions_caches: caches },
+ });
+
+ // First rate limit check: OK (initial check before starting)
+ // After 10 deletions: rate limit is too low
+ mockGithub.rest.rateLimit.get.mockResolvedValueOnce({ data: { rate: { remaining: 5000 } } }).mockResolvedValueOnce({ data: { rate: { remaining: 50 } } });
+
+ mockGithub.rest.actions.deleteActionsCacheById.mockResolvedValue({});
+
+ await module.main({ deleteDelayMs: 0, listDelayMs: 0 });
+
+ // Should have stopped after 10 deletions (checked rate limit, it was low)
+ expect(mockGithub.rest.actions.deleteActionsCacheById).toHaveBeenCalledTimes(10);
+ expect(mockCore.warning).toHaveBeenCalledWith(expect.stringContaining("Rate limit getting low"));
+ });
+ });
+});
diff --git a/pkg/workflow/maintenance_workflow.go b/pkg/workflow/maintenance_workflow.go
index 748fbdc012b..fff0938d306 100644
--- a/pkg/workflow/maintenance_workflow.go
+++ b/pkg/workflow/maintenance_workflow.go
@@ -343,6 +343,43 @@ jobs:
await main();
`)
+ // Add cleanup-cache-memory job for scheduled runs
+ // This job lists all caches starting with "memory-", groups them by key prefix,
+ // keeps the latest run ID per group, and deletes the rest.
+ yaml.WriteString(`
+ cleanup-cache-memory:
+ if: ${{ !github.event.repository.fork && (github.event_name != 'workflow_dispatch' || github.event.inputs.operation == '') }}
+ runs-on: ` + runsOnValue + `
+ permissions:
+ actions: write
+ steps:
+`)
+
+ // Add checkout step only in dev/script mode (for local action paths)
+ if actionMode == ActionModeDev || actionMode == ActionModeScript {
+ yaml.WriteString(" - name: Checkout actions folder\n")
+ yaml.WriteString(" uses: " + GetActionPin("actions/checkout") + "\n")
+ yaml.WriteString(" with:\n")
+ yaml.WriteString(" sparse-checkout: |\n")
+ yaml.WriteString(" actions\n")
+ yaml.WriteString(" persist-credentials: false\n\n")
+ }
+
+ yaml.WriteString(` - name: Setup Scripts
+ uses: ` + setupActionRef + `
+ with:
+ destination: ${{ runner.temp }}/gh-aw/actions
+
+ - name: Cleanup outdated cache-memory entries
+ uses: ` + GetActionPin("actions/github-script") + `
+ with:
+ script: |
+ const { setupGlobals } = require('${{ runner.temp }}/gh-aw/actions/setup_globals.cjs');
+ setupGlobals(core, github, context, exec, io, getOctokit);
+ const { main } = require('${{ runner.temp }}/gh-aw/actions/cleanup_cache_memory.cjs');
+ await main();
+`)
+
// Add unified run_operation job for all dispatch operations except safe_outputs, create_labels, and validate
yaml.WriteString(`
run_operation:
diff --git a/pkg/workflow/maintenance_workflow_test.go b/pkg/workflow/maintenance_workflow_test.go
index ad750f80d46..bbf080639a8 100644
--- a/pkg/workflow/maintenance_workflow_test.go
+++ b/pkg/workflow/maintenance_workflow_test.go
@@ -289,7 +289,7 @@ func TestGenerateMaintenanceWorkflow_OperationJobConditions(t *testing.T) {
const runOpSectionSearchRange = 400
// Jobs that should be disabled when operation is set
- disabledJobs := []string{"close-expired-entities:", "compile-workflows:", "zizmor-scan:", "secret-validation:"}
+ disabledJobs := []string{"close-expired-entities:", "cleanup-cache-memory:", "compile-workflows:", "zizmor-scan:", "secret-validation:"}
for _, job := range disabledJobs {
// Find the if: condition for each job
jobIdx := strings.Index(yaml, "\n "+job)
From ed5d105c82b5f2951c46a8e590de211cbd82358d Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Sun, 12 Apr 2026 13:58:22 +0000
Subject: [PATCH 2/7] fix: sort disabled jobs list alphabetically in
maintenance workflow test
Agent-Logs-Url: https://github.com/github/gh-aw/sessions/5d9226f6-d004-4ef1-b72a-d5ba94d545d5
Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com>
---
pkg/workflow/maintenance_workflow_test.go | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pkg/workflow/maintenance_workflow_test.go b/pkg/workflow/maintenance_workflow_test.go
index bbf080639a8..6ef687a14f3 100644
--- a/pkg/workflow/maintenance_workflow_test.go
+++ b/pkg/workflow/maintenance_workflow_test.go
@@ -289,7 +289,7 @@ func TestGenerateMaintenanceWorkflow_OperationJobConditions(t *testing.T) {
const runOpSectionSearchRange = 400
// Jobs that should be disabled when operation is set
- disabledJobs := []string{"close-expired-entities:", "cleanup-cache-memory:", "compile-workflows:", "zizmor-scan:", "secret-validation:"}
+ disabledJobs := []string{"cleanup-cache-memory:", "close-expired-entities:", "compile-workflows:", "secret-validation:", "zizmor-scan:"}
for _, job := range disabledJobs {
// Find the if: condition for each job
jobIdx := strings.Index(yaml, "\n "+job)
From 56107ff88228170cb686f390747dd728c365f1c7 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Sun, 12 Apr 2026 14:17:21 +0000
Subject: [PATCH 3/7] refactor: address PR review feedback
- Reuse delay from expired_entity_cleanup_helpers.cjs instead of duplicating
- Extract rate limit checking to shared rate_limit_helpers.cjs with tests
- Combine extractRunId/deriveGroupKey into single parseCacheKey function
- Add MAX_LIST_PAGES (50) upper bound on pagination loop
- Sort caches by last_accessed_at descending (newest first)
- Add fetchAndLogRateLimit logging at start and end of cleanup
- Add detailed logging: repository info, kept entries, page progress
- Log rate limit remaining during periodic checks
Agent-Logs-Url: https://github.com/github/gh-aw/sessions/b4041e3a-bd6c-45f6-abaa-d48acfb07f3c
Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com>
---
actions/setup/js/cleanup_cache_memory.cjs | 117 +++++-------
.../setup/js/cleanup_cache_memory.test.cjs | 168 ++++++++++++++----
actions/setup/js/rate_limit_helpers.cjs | 51 ++++++
actions/setup/js/rate_limit_helpers.test.cjs | 83 +++++++++
4 files changed, 312 insertions(+), 107 deletions(-)
create mode 100644 actions/setup/js/rate_limit_helpers.cjs
create mode 100644 actions/setup/js/rate_limit_helpers.test.cjs
diff --git a/actions/setup/js/cleanup_cache_memory.cjs b/actions/setup/js/cleanup_cache_memory.cjs
index 9ff6c686d80..0872161b2ce 100644
--- a/actions/setup/js/cleanup_cache_memory.cjs
+++ b/actions/setup/js/cleanup_cache_memory.cjs
@@ -2,37 +2,9 @@
///
const { getErrorMessage } = require("./error_helpers.cjs");
-
-/**
- * Delay execution for a given number of milliseconds.
- * Used to avoid GitHub API throttling between requests.
- * @param {number} ms - Milliseconds to wait
- * @returns {Promise}
- */
-function delay(ms) {
- return new Promise(resolve => setTimeout(resolve, ms));
-}
-
-/**
- * Check the current rate limit and determine if we should continue.
- * Returns the remaining requests count, or -1 if we couldn't check.
- * @param {any} github - GitHub REST client
- * @returns {Promise} Remaining requests, or -1 on error
- */
-async function getRateLimitRemaining(github) {
- try {
- const { data } = await github.rest.rateLimit.get();
- return data.rate.remaining;
- } catch {
- return -1;
- }
-}
-
-/**
- * Minimum rate limit remaining before we skip further operations.
- * This reserves capacity for other workflow jobs and API consumers.
- */
-const MIN_RATE_LIMIT_REMAINING = 100;
+const { delay } = require("./expired_entity_cleanup_helpers.cjs");
+const { checkRateLimit, MIN_RATE_LIMIT_REMAINING } = require("./rate_limit_helpers.cjs");
+const { fetchAndLogRateLimit } = require("./github_rate_limit_logger.cjs");
/**
* Default delay in ms between delete operations to avoid throttling.
@@ -45,41 +17,30 @@ const DELETE_DELAY_MS = 250;
const LIST_DELAY_MS = 100;
/**
- * Extract the run ID from a cache key.
- * Cache keys follow the pattern: memory-{parts}-{runID}
- * where runID is the last numeric segment.
- *
- * @param {string} key - Cache key string
- * @returns {number | null} The extracted run ID, or null if not found
+ * Maximum number of pages to fetch when listing caches.
+ * At 100 caches per page this allows up to 5000 caches.
*/
-function extractRunId(key) {
- const parts = key.split("-");
- // Walk backwards to find the last purely numeric segment
- for (let i = parts.length - 1; i >= 0; i--) {
- if (/^\d+$/.test(parts[i])) {
- return parseInt(parts[i], 10);
- }
- }
- return null;
-}
+const MAX_LIST_PAGES = 50;
/**
- * Derive the group key from a cache key by removing the run ID suffix.
- * This groups caches that differ only by their run ID.
+ * Parse a cache key to extract the run ID and group key in a single pass.
+ * Cache keys follow the pattern: memory-{parts}-{runID}
+ * where runID is the last purely numeric segment.
*
* @param {string} key - Cache key string
- * @returns {string} The group key (everything before the run ID)
+ * @returns {{ runId: number | null, groupKey: string }}
*/
-function deriveGroupKey(key) {
+function parseCacheKey(key) {
const parts = key.split("-");
- // Walk backwards to find the last purely numeric segment and strip it
for (let i = parts.length - 1; i >= 0; i--) {
if (/^\d+$/.test(parts[i])) {
- return parts.slice(0, i).join("-");
+ return {
+ runId: parseInt(parts[i], 10),
+ groupKey: parts.slice(0, i).join("-"),
+ };
}
}
- // If no numeric segment found, return the full key
- return key;
+ return { runId: null, groupKey: key };
}
/**
@@ -92,6 +53,7 @@ function deriveGroupKey(key) {
/**
* List all caches starting with "memory-" prefix, handling pagination.
+ * Results are sorted newest-first by last_accessed_at from the API.
*
* @param {any} github - GitHub REST client
* @param {string} owner - Repository owner
@@ -105,15 +67,16 @@ async function listMemoryCaches(github, owner, repo, listDelayMs = LIST_DELAY_MS
let page = 1;
const perPage = 100;
- while (true) {
+ while (page <= MAX_LIST_PAGES) {
+ core.info(` Fetching cache list page ${page}...`);
const response = await github.rest.actions.getActionsCacheList({
owner,
repo,
key: "memory-",
per_page: perPage,
page,
- sort: "key",
- direction: "asc",
+ sort: "last_accessed_at",
+ direction: "desc",
});
const actionsCaches = response.data.actions_caches;
@@ -125,14 +88,12 @@ async function listMemoryCaches(github, owner, repo, listDelayMs = LIST_DELAY_MS
if (!cache.key || !cache.key.startsWith("memory-")) {
continue;
}
- caches.push({
- id: cache.id,
- key: cache.key,
- runId: extractRunId(cache.key),
- groupKey: deriveGroupKey(cache.key),
- });
+ const { runId, groupKey } = parseCacheKey(cache.key);
+ caches.push({ id: cache.id, key: cache.key, runId, groupKey });
}
+ core.info(` Page ${page}: ${actionsCaches.length} cache(s) fetched (${caches.length} total)`);
+
if (actionsCaches.length < perPage) {
break;
}
@@ -142,6 +103,10 @@ async function listMemoryCaches(github, owner, repo, listDelayMs = LIST_DELAY_MS
await delay(listDelayMs);
}
+ if (page > MAX_LIST_PAGES) {
+ core.warning(`โ ๏ธ Reached maximum page limit (${MAX_LIST_PAGES}). Some caches may not have been listed.`);
+ }
+
return caches;
}
@@ -212,10 +177,14 @@ async function main(options = {}) {
const repo = context.repo.repo;
core.info("๐งน Starting cache-memory cleanup");
+ core.info(` Repository: ${owner}/${repo}`);
+
+ // Log initial rate limit snapshot for observability
+ await fetchAndLogRateLimit(github, "cleanup_cache_memory_start");
// Check rate limit before starting
- const initialRemaining = await getRateLimitRemaining(github);
- if (initialRemaining !== -1 && initialRemaining < MIN_RATE_LIMIT_REMAINING) {
+ const { ok: rateLimitOk, remaining: initialRemaining } = await checkRateLimit(github, "cleanup_cache_memory_initial");
+ if (!rateLimitOk) {
core.warning(`โ ๏ธ Rate limit too low (${initialRemaining} remaining, minimum: ${MIN_RATE_LIMIT_REMAINING}). Skipping cache cleanup.`);
core.summary.addRaw(`## Cache Memory Cleanup\n\nโ ๏ธ Skipped: Rate limit too low (${initialRemaining} remaining, minimum required: ${MIN_RATE_LIMIT_REMAINING})\n`);
await core.summary.write();
@@ -249,6 +218,9 @@ async function main(options = {}) {
const { toDelete, kept } = identifyCachesToDelete(caches);
core.info(` Groups with latest entries kept: ${kept.length}`);
+ for (const entry of kept) {
+ core.info(` โ Keeping: ${entry.key} (run ID: ${entry.runId})`);
+ }
core.info(` Outdated entries to delete: ${toDelete.length}`);
if (toDelete.length === 0) {
@@ -268,12 +240,13 @@ async function main(options = {}) {
for (const cache of toDelete) {
// Check rate limit periodically (every 10 deletions)
if (deletedCount > 0 && deletedCount % 10 === 0) {
- const remaining = await getRateLimitRemaining(github);
- if (remaining !== -1 && remaining < MIN_RATE_LIMIT_REMAINING) {
+ const { ok, remaining } = await checkRateLimit(github, "cleanup_cache_memory_periodic");
+ if (!ok) {
core.warning(`โ ๏ธ Rate limit getting low (${remaining} remaining). Stopping deletion early.`);
core.warning(` Deleted ${deletedCount} of ${toDelete.length} caches before stopping.`);
break;
}
+ core.info(` Rate limit check: ${remaining} remaining`);
}
try {
@@ -295,6 +268,9 @@ async function main(options = {}) {
await delay(deleteDelayMs);
}
+ // Log final rate limit snapshot for observability
+ await fetchAndLogRateLimit(github, "cleanup_cache_memory_end");
+
// Summary
core.info(`\n๐ Cache cleanup complete:`);
core.info(` Total memory caches found: ${caches.length}`);
@@ -327,9 +303,8 @@ async function main(options = {}) {
module.exports = {
main,
- extractRunId,
- deriveGroupKey,
+ parseCacheKey,
identifyCachesToDelete,
listMemoryCaches,
- MIN_RATE_LIMIT_REMAINING,
+ MAX_LIST_PAGES,
};
diff --git a/actions/setup/js/cleanup_cache_memory.test.cjs b/actions/setup/js/cleanup_cache_memory.test.cjs
index 46b195a0510..1c1c17a4a3e 100644
--- a/actions/setup/js/cleanup_cache_memory.test.cjs
+++ b/actions/setup/js/cleanup_cache_memory.test.cjs
@@ -36,7 +36,10 @@ describe("cleanup_cache_memory", () => {
},
rateLimit: {
get: vi.fn().mockResolvedValue({
- data: { rate: { remaining: 5000 } },
+ data: {
+ rate: { remaining: 5000, limit: 5000, used: 0 },
+ resources: {},
+ },
}),
},
},
@@ -44,42 +47,33 @@ describe("cleanup_cache_memory", () => {
global.github = mockGithub;
});
- describe("extractRunId", () => {
- it("should extract run ID from standard cache key", async () => {
- const { extractRunId } = await import("./cleanup_cache_memory.cjs");
- expect(extractRunId("memory-none-nopolicy-workflow-12345")).toBe(12345);
+ describe("parseCacheKey", () => {
+ it("should extract run ID and group key from standard cache key", async () => {
+ const { parseCacheKey } = await import("./cleanup_cache_memory.cjs");
+ const result = parseCacheKey("memory-none-nopolicy-workflow-12345");
+ expect(result.runId).toBe(12345);
+ expect(result.groupKey).toBe("memory-none-nopolicy-workflow");
});
- it("should extract run ID from integrity-aware cache key", async () => {
- const { extractRunId } = await import("./cleanup_cache_memory.cjs");
- expect(extractRunId("memory-unapproved-7e4d9f12-session-workflow-67890")).toBe(67890);
+ it("should extract run ID and group key from integrity-aware cache key", async () => {
+ const { parseCacheKey } = await import("./cleanup_cache_memory.cjs");
+ const result = parseCacheKey("memory-unapproved-7e4d9f12-session-workflow-67890");
+ expect(result.runId).toBe(67890);
+ expect(result.groupKey).toBe("memory-unapproved-7e4d9f12-session-workflow");
});
- it("should return null when no numeric segment exists", async () => {
- const { extractRunId } = await import("./cleanup_cache_memory.cjs");
- expect(extractRunId("memory-none-nopolicy-workflow")).toBeNull();
+ it("should return null runId when no numeric segment exists", async () => {
+ const { parseCacheKey } = await import("./cleanup_cache_memory.cjs");
+ const result = parseCacheKey("memory-none-nopolicy-workflow");
+ expect(result.runId).toBeNull();
+ expect(result.groupKey).toBe("memory-none-nopolicy-workflow");
});
it("should handle cache key with only run ID as numeric part", async () => {
- const { extractRunId } = await import("./cleanup_cache_memory.cjs");
- expect(extractRunId("memory-abc-def-99999")).toBe(99999);
- });
- });
-
- describe("deriveGroupKey", () => {
- it("should derive group key by removing run ID suffix", async () => {
- const { deriveGroupKey } = await import("./cleanup_cache_memory.cjs");
- expect(deriveGroupKey("memory-none-nopolicy-workflow-12345")).toBe("memory-none-nopolicy-workflow");
- });
-
- it("should handle integrity-aware keys", async () => {
- const { deriveGroupKey } = await import("./cleanup_cache_memory.cjs");
- expect(deriveGroupKey("memory-unapproved-7e4d9f12-session-67890")).toBe("memory-unapproved-7e4d9f12-session");
- });
-
- it("should return full key when no numeric segment found", async () => {
- const { deriveGroupKey } = await import("./cleanup_cache_memory.cjs");
- expect(deriveGroupKey("memory-none-nopolicy-workflow")).toBe("memory-none-nopolicy-workflow");
+ const { parseCacheKey } = await import("./cleanup_cache_memory.cjs");
+ const result = parseCacheKey("memory-abc-def-99999");
+ expect(result.runId).toBe(99999);
+ expect(result.groupKey).toBe("memory-abc-def");
});
});
@@ -181,8 +175,11 @@ describe("cleanup_cache_memory", () => {
it("should skip cleanup when rate limit is below threshold", async () => {
const module = await import("./cleanup_cache_memory.cjs");
- mockGithub.rest.rateLimit.get.mockResolvedValueOnce({
- data: { rate: { remaining: 50 } },
+ mockGithub.rest.rateLimit.get.mockResolvedValue({
+ data: {
+ rate: { remaining: 50, limit: 5000, used: 4950 },
+ resources: {},
+ },
});
await module.main({ deleteDelayMs: 0, listDelayMs: 0 });
@@ -287,7 +284,7 @@ describe("cleanup_cache_memory", () => {
it("should handle paginated cache list results", async () => {
const module = await import("./cleanup_cache_memory.cjs");
- // First page - full page of 100 items (simulated with 2 for testing)
+ // First page - full page of 100 items
const page1Caches = [];
for (let i = 0; i < 100; i++) {
page1Caches.push({ id: i + 1, key: `memory-none-nopolicy-wf-${1000 + i}` });
@@ -330,9 +327,31 @@ describe("cleanup_cache_memory", () => {
data: { total_count: 15, actions_caches: caches },
});
- // First rate limit check: OK (initial check before starting)
- // After 10 deletions: rate limit is too low
- mockGithub.rest.rateLimit.get.mockResolvedValueOnce({ data: { rate: { remaining: 5000 } } }).mockResolvedValueOnce({ data: { rate: { remaining: 50 } } });
+ // Rate limit calls:
+ // 1. fetchAndLogRateLimit at start of main โ rateLimit.get
+ // 2-3. checkRateLimit (initial) โ fetchAndLogRateLimit + rateLimit.get
+ // ... 10 deletions ...
+ // 4-5. checkRateLimit (periodic) โ fetchAndLogRateLimit + rateLimit.get
+ // We want call 4 or 5 to return low rate limit to trigger early stop
+ let callCount = 0;
+ mockGithub.rest.rateLimit.get.mockImplementation(() => {
+ callCount++;
+ // Return low rate limit starting from the periodic check (call 4+)
+ if (callCount >= 4) {
+ return Promise.resolve({
+ data: {
+ rate: { remaining: 50, limit: 5000, used: 4950 },
+ resources: {},
+ },
+ });
+ }
+ return Promise.resolve({
+ data: {
+ rate: { remaining: 5000, limit: 5000, used: 0 },
+ resources: {},
+ },
+ });
+ });
mockGithub.rest.actions.deleteActionsCacheById.mockResolvedValue({});
@@ -343,4 +362,81 @@ describe("cleanup_cache_memory", () => {
expect(mockCore.warning).toHaveBeenCalledWith(expect.stringContaining("Rate limit getting low"));
});
});
+
+ describe("main - logging", () => {
+ it("should log kept entries with their keys", async () => {
+ const module = await import("./cleanup_cache_memory.cjs");
+
+ mockGithub.rest.actions.getActionsCacheList.mockResolvedValueOnce({
+ data: {
+ total_count: 2,
+ actions_caches: [
+ { id: 1, key: "memory-none-nopolicy-wf-100" },
+ { id: 2, key: "memory-none-nopolicy-wf-200" },
+ ],
+ },
+ });
+
+ mockGithub.rest.actions.deleteActionsCacheById.mockResolvedValue({});
+
+ await module.main({ deleteDelayMs: 0, listDelayMs: 0 });
+
+ expect(mockCore.info).toHaveBeenCalledWith(expect.stringContaining("Keeping: memory-none-nopolicy-wf-200"));
+ });
+
+ it("should log repository info", async () => {
+ const module = await import("./cleanup_cache_memory.cjs");
+
+ mockGithub.rest.actions.getActionsCacheList.mockResolvedValueOnce({
+ data: { total_count: 0, actions_caches: [] },
+ });
+
+ await module.main({ deleteDelayMs: 0, listDelayMs: 0 });
+
+ expect(mockCore.info).toHaveBeenCalledWith(expect.stringContaining("Repository: testowner/testrepo"));
+ });
+ });
+
+ describe("listMemoryCaches - sort order", () => {
+ it("should request caches sorted by last_accessed_at descending", async () => {
+ const module = await import("./cleanup_cache_memory.cjs");
+
+ mockGithub.rest.actions.getActionsCacheList.mockResolvedValueOnce({
+ data: { total_count: 0, actions_caches: [] },
+ });
+
+ await module.listMemoryCaches(mockGithub, "testowner", "testrepo", 0);
+
+ expect(mockGithub.rest.actions.getActionsCacheList).toHaveBeenCalledWith(
+ expect.objectContaining({
+ sort: "last_accessed_at",
+ direction: "desc",
+ })
+ );
+ });
+ });
+
+ describe("listMemoryCaches - upper bound", () => {
+ it("should respect MAX_LIST_PAGES limit", async () => {
+ const { listMemoryCaches, MAX_LIST_PAGES } = await import("./cleanup_cache_memory.cjs");
+
+ // Return full pages forever
+ mockGithub.rest.actions.getActionsCacheList.mockImplementation(({ page }) => {
+ const caches = [];
+ for (let i = 0; i < 100; i++) {
+ caches.push({ id: page * 100 + i, key: `memory-none-nopolicy-wf-${page * 1000 + i}` });
+ }
+ return Promise.resolve({
+ data: { total_count: 10000, actions_caches: caches },
+ });
+ });
+
+ const result = await listMemoryCaches(mockGithub, "testowner", "testrepo", 0);
+
+ // Should stop at MAX_LIST_PAGES
+ expect(mockGithub.rest.actions.getActionsCacheList).toHaveBeenCalledTimes(MAX_LIST_PAGES);
+ expect(result.length).toBe(MAX_LIST_PAGES * 100);
+ expect(mockCore.warning).toHaveBeenCalledWith(expect.stringContaining("maximum page limit"));
+ });
+ });
});
diff --git a/actions/setup/js/rate_limit_helpers.cjs b/actions/setup/js/rate_limit_helpers.cjs
new file mode 100644
index 00000000000..52c74e73723
--- /dev/null
+++ b/actions/setup/js/rate_limit_helpers.cjs
@@ -0,0 +1,51 @@
+// @ts-check
+///
+
+const { fetchAndLogRateLimit } = require("./github_rate_limit_logger.cjs");
+
+/**
+ * Minimum rate limit remaining before we skip further operations.
+ * This reserves capacity for other workflow jobs and API consumers.
+ */
+const MIN_RATE_LIMIT_REMAINING = 100;
+
+/**
+ * Check the current rate limit and determine if we should continue.
+ * Returns the remaining requests count, or -1 if we couldn't check.
+ * Also logs the rate limit snapshot for observability.
+ *
+ * @param {any} github - GitHub REST client
+ * @param {string} [operation="rate_limit_check"] - Label for the log entry
+ * @returns {Promise} Remaining requests, or -1 on error
+ */
+async function getRateLimitRemaining(github, operation = "rate_limit_check") {
+ try {
+ await fetchAndLogRateLimit(github, operation);
+ const { data } = await github.rest.rateLimit.get();
+ return data.rate.remaining;
+ } catch {
+ return -1;
+ }
+}
+
+/**
+ * Check if the current rate limit is sufficient for operations.
+ * Logs a warning and returns false if the rate limit is too low.
+ *
+ * @param {any} github - GitHub REST client
+ * @param {string} [operation="rate_limit_check"] - Label for the log entry
+ * @returns {Promise<{ok: boolean, remaining: number}>}
+ */
+async function checkRateLimit(github, operation = "rate_limit_check") {
+ const remaining = await getRateLimitRemaining(github, operation);
+ if (remaining !== -1 && remaining < MIN_RATE_LIMIT_REMAINING) {
+ return { ok: false, remaining };
+ }
+ return { ok: true, remaining };
+}
+
+module.exports = {
+ MIN_RATE_LIMIT_REMAINING,
+ getRateLimitRemaining,
+ checkRateLimit,
+};
diff --git a/actions/setup/js/rate_limit_helpers.test.cjs b/actions/setup/js/rate_limit_helpers.test.cjs
new file mode 100644
index 00000000000..61511e2c564
--- /dev/null
+++ b/actions/setup/js/rate_limit_helpers.test.cjs
@@ -0,0 +1,83 @@
+// @ts-check
+import { describe, it, expect, beforeEach, vi } from "vitest";
+
+// Mock core global (needed by github_rate_limit_logger.cjs)
+const mockCore = {
+ info: vi.fn(),
+ warning: vi.fn(),
+ error: vi.fn(),
+};
+
+global.core = mockCore;
+
+describe("rate_limit_helpers", () => {
+ let mockGithub;
+
+ beforeEach(() => {
+ vi.clearAllMocks();
+ mockGithub = {
+ rest: {
+ rateLimit: {
+ get: vi.fn().mockResolvedValue({
+ data: {
+ rate: { remaining: 5000, limit: 5000, used: 0 },
+ resources: {},
+ },
+ }),
+ },
+ },
+ };
+ });
+
+ describe("getRateLimitRemaining", () => {
+ it("should return remaining rate limit", async () => {
+ const { getRateLimitRemaining } = await import("./rate_limit_helpers.cjs");
+ const remaining = await getRateLimitRemaining(mockGithub, "test");
+ expect(remaining).toBe(5000);
+ });
+
+ it("should return -1 on error", async () => {
+ const { getRateLimitRemaining } = await import("./rate_limit_helpers.cjs");
+ mockGithub.rest.rateLimit.get.mockRejectedValueOnce(new Error("API error")).mockRejectedValueOnce(new Error("API error"));
+ const remaining = await getRateLimitRemaining(mockGithub, "test");
+ expect(remaining).toBe(-1);
+ });
+ });
+
+ describe("checkRateLimit", () => {
+ it("should return ok when rate limit is sufficient", async () => {
+ const { checkRateLimit } = await import("./rate_limit_helpers.cjs");
+ const result = await checkRateLimit(mockGithub, "test");
+ expect(result.ok).toBe(true);
+ expect(result.remaining).toBe(5000);
+ });
+
+ it("should return not ok when rate limit is too low", async () => {
+ const { checkRateLimit } = await import("./rate_limit_helpers.cjs");
+ mockGithub.rest.rateLimit.get.mockResolvedValue({
+ data: {
+ rate: { remaining: 50, limit: 5000, used: 4950 },
+ resources: {},
+ },
+ });
+ const result = await checkRateLimit(mockGithub, "test");
+ expect(result.ok).toBe(false);
+ expect(result.remaining).toBe(50);
+ });
+
+ it("should return ok when rate limit check fails", async () => {
+ const { checkRateLimit } = await import("./rate_limit_helpers.cjs");
+ mockGithub.rest.rateLimit.get.mockRejectedValue(new Error("API error"));
+ const result = await checkRateLimit(mockGithub, "test");
+ expect(result.ok).toBe(true);
+ expect(result.remaining).toBe(-1);
+ });
+ });
+
+ describe("MIN_RATE_LIMIT_REMAINING", () => {
+ it("should be 100", async () => {
+ const { MIN_RATE_LIMIT_REMAINING } = await import("./rate_limit_helpers.cjs");
+ expect(MIN_RATE_LIMIT_REMAINING).toBe(100);
+ });
+ });
+});
From c53967e92fdc1f248fb1ebdadf6450427f34751e Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Sun, 12 Apr 2026 14:34:24 +0000
Subject: [PATCH 4/7] feat: add clean_cache_memories operation to
workflow_dispatch
Add clean_cache_memories as a new workflow_dispatch operation so the
cleanup-cache-memory job can be triggered on demand. The job now runs
on schedule, empty operation, or the clean_cache_memories operation.
The run_operation job excludes this operation since it has its own
dedicated job.
Agent-Logs-Url: https://github.com/github/gh-aw/sessions/5f72e332-33fd-4059-985e-30fe470376b7
Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com>
---
.github/workflows/agentics-maintenance.yml | 5 +++--
pkg/workflow/maintenance_workflow.go | 9 +++++----
pkg/workflow/maintenance_workflow_test.go | 23 +++++++++++++++++++---
3 files changed, 28 insertions(+), 9 deletions(-)
diff --git a/.github/workflows/agentics-maintenance.yml b/.github/workflows/agentics-maintenance.yml
index ac0b35306c1..7df10ecbc42 100644
--- a/.github/workflows/agentics-maintenance.yml
+++ b/.github/workflows/agentics-maintenance.yml
@@ -50,6 +50,7 @@ on:
- 'upgrade'
- 'safe_outputs'
- 'create_labels'
+ - 'clean_cache_memories'
- 'validate'
run_url:
description: 'Run URL or run ID to replay safe outputs from (e.g. https://github.com/owner/repo/actions/runs/12345 or 12345). Required when operation is safe_outputs.'
@@ -108,7 +109,7 @@ jobs:
await main();
cleanup-cache-memory:
- if: ${{ !github.event.repository.fork && (github.event_name != 'workflow_dispatch' || github.event.inputs.operation == '') }}
+ if: ${{ !github.event.repository.fork && (github.event_name != 'workflow_dispatch' || github.event.inputs.operation == '' || github.event.inputs.operation == 'clean_cache_memories') }}
runs-on: ubuntu-slim
permissions:
actions: write
@@ -135,7 +136,7 @@ jobs:
await main();
run_operation:
- if: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.operation != '' && github.event.inputs.operation != 'safe_outputs' && github.event.inputs.operation != 'create_labels' && github.event.inputs.operation != 'validate' && !github.event.repository.fork }}
+ if: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.operation != '' && github.event.inputs.operation != 'safe_outputs' && github.event.inputs.operation != 'create_labels' && github.event.inputs.operation != 'clean_cache_memories' && github.event.inputs.operation != 'validate' && !github.event.repository.fork }}
runs-on: ubuntu-slim
permissions:
actions: write
diff --git a/pkg/workflow/maintenance_workflow.go b/pkg/workflow/maintenance_workflow.go
index fff0938d306..05ed341078e 100644
--- a/pkg/workflow/maintenance_workflow.go
+++ b/pkg/workflow/maintenance_workflow.go
@@ -262,6 +262,7 @@ on:
- 'upgrade'
- 'safe_outputs'
- 'create_labels'
+ - 'clean_cache_memories'
- 'validate'
run_url:
description: 'Run URL or run ID to replay safe outputs from (e.g. https://github.com/owner/repo/actions/runs/12345 or 12345). Required when operation is safe_outputs.'
@@ -343,12 +344,12 @@ jobs:
await main();
`)
- // Add cleanup-cache-memory job for scheduled runs
+ // Add cleanup-cache-memory job for scheduled runs and clean_cache_memories operation
// This job lists all caches starting with "memory-", groups them by key prefix,
// keeps the latest run ID per group, and deletes the rest.
yaml.WriteString(`
cleanup-cache-memory:
- if: ${{ !github.event.repository.fork && (github.event_name != 'workflow_dispatch' || github.event.inputs.operation == '') }}
+ if: ${{ !github.event.repository.fork && (github.event_name != 'workflow_dispatch' || github.event.inputs.operation == '' || github.event.inputs.operation == 'clean_cache_memories') }}
runs-on: ` + runsOnValue + `
permissions:
actions: write
@@ -380,10 +381,10 @@ jobs:
await main();
`)
- // Add unified run_operation job for all dispatch operations except safe_outputs, create_labels, and validate
+ // Add unified run_operation job for all dispatch operations except safe_outputs, create_labels, clean_cache_memories, and validate
yaml.WriteString(`
run_operation:
- if: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.operation != '' && github.event.inputs.operation != 'safe_outputs' && github.event.inputs.operation != 'create_labels' && github.event.inputs.operation != 'validate' && !github.event.repository.fork }}
+ if: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.operation != '' && github.event.inputs.operation != 'safe_outputs' && github.event.inputs.operation != 'create_labels' && github.event.inputs.operation != 'clean_cache_memories' && github.event.inputs.operation != 'validate' && !github.event.repository.fork }}
runs-on: ` + runsOnValue + `
permissions:
actions: write
diff --git a/pkg/workflow/maintenance_workflow_test.go b/pkg/workflow/maintenance_workflow_test.go
index 6ef687a14f3..7dc059e8b52 100644
--- a/pkg/workflow/maintenance_workflow_test.go
+++ b/pkg/workflow/maintenance_workflow_test.go
@@ -281,15 +281,16 @@ func TestGenerateMaintenanceWorkflow_OperationJobConditions(t *testing.T) {
yaml := string(content)
operationSkipCondition := `github.event_name != 'workflow_dispatch' || github.event.inputs.operation == ''`
- operationRunCondition := `github.event_name == 'workflow_dispatch' && github.event.inputs.operation != '' && github.event.inputs.operation != 'safe_outputs' && github.event.inputs.operation != 'create_labels' && github.event.inputs.operation != 'validate'`
+ operationRunCondition := `github.event_name == 'workflow_dispatch' && github.event.inputs.operation != '' && github.event.inputs.operation != 'safe_outputs' && github.event.inputs.operation != 'create_labels' && github.event.inputs.operation != 'clean_cache_memories' && github.event.inputs.operation != 'validate'`
applySafeOutputsCondition := `github.event_name == 'workflow_dispatch' && github.event.inputs.operation == 'safe_outputs'`
createLabelsCondition := `github.event_name == 'workflow_dispatch' && github.event.inputs.operation == 'create_labels'`
+ cleanCacheMemoriesCondition := `github.event_name != 'workflow_dispatch' || github.event.inputs.operation == '' || github.event.inputs.operation == 'clean_cache_memories'`
const jobSectionSearchRange = 300
- const runOpSectionSearchRange = 400
+ const runOpSectionSearchRange = 500
// Jobs that should be disabled when operation is set
- disabledJobs := []string{"cleanup-cache-memory:", "close-expired-entities:", "compile-workflows:", "secret-validation:", "zizmor-scan:"}
+ disabledJobs := []string{"close-expired-entities:", "compile-workflows:", "secret-validation:", "zizmor-scan:"}
for _, job := range disabledJobs {
// Find the if: condition for each job
jobIdx := strings.Index(yaml, "\n "+job)
@@ -304,6 +305,17 @@ func TestGenerateMaintenanceWorkflow_OperationJobConditions(t *testing.T) {
}
}
+ // cleanup-cache-memory job should run on schedule, empty operation, or clean_cache_memories operation
+ cleanupCacheIdx := strings.Index(yaml, "\n cleanup-cache-memory:")
+ if cleanupCacheIdx == -1 {
+ t.Errorf("Job cleanup-cache-memory not found in generated workflow")
+ } else {
+ cleanupCacheSection := yaml[cleanupCacheIdx : cleanupCacheIdx+jobSectionSearchRange]
+ if !strings.Contains(cleanupCacheSection, cleanCacheMemoriesCondition) {
+ t.Errorf("Job cleanup-cache-memory should have the clean_cache_memories condition %q in:\n%s", cleanCacheMemoriesCondition, cleanupCacheSection)
+ }
+ }
+
// run_operation job should NOT have the skip condition but should have its own activation condition
// and should exclude safe_outputs
runOpIdx := strings.Index(yaml, "\n run_operation:")
@@ -363,6 +375,11 @@ func TestGenerateMaintenanceWorkflow_OperationJobConditions(t *testing.T) {
t.Error("workflow_dispatch operation choices should include 'safe_outputs'")
}
+ // Verify clean_cache_memories is an option in the operation choices
+ if !strings.Contains(yaml, "- 'clean_cache_memories'") {
+ t.Error("workflow_dispatch operation choices should include 'clean_cache_memories'")
+ }
+
// Verify validate is an option in the operation choices
if !strings.Contains(yaml, "- 'validate'") {
t.Error("workflow_dispatch operation choices should include 'validate'")
From 5a6ae0e747dacff95d08b7174d89bd1744387ed4 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Sun, 12 Apr 2026 14:38:22 +0000
Subject: [PATCH 5/7] refactor: improve comments for operation exclusion
clarity
Agent-Logs-Url: https://github.com/github/gh-aw/sessions/5f72e332-33fd-4059-985e-30fe470376b7
Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com>
---
pkg/workflow/maintenance_workflow.go | 2 +-
pkg/workflow/maintenance_workflow_test.go | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/pkg/workflow/maintenance_workflow.go b/pkg/workflow/maintenance_workflow.go
index 05ed341078e..f6a32e4ca85 100644
--- a/pkg/workflow/maintenance_workflow.go
+++ b/pkg/workflow/maintenance_workflow.go
@@ -381,7 +381,7 @@ jobs:
await main();
`)
- // Add unified run_operation job for all dispatch operations except safe_outputs, create_labels, clean_cache_memories, and validate
+ // Add unified run_operation job for all dispatch operations except those with dedicated jobs (safe_outputs, create_labels, clean_cache_memories, validate)
yaml.WriteString(`
run_operation:
if: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.operation != '' && github.event.inputs.operation != 'safe_outputs' && github.event.inputs.operation != 'create_labels' && github.event.inputs.operation != 'clean_cache_memories' && github.event.inputs.operation != 'validate' && !github.event.repository.fork }}
diff --git a/pkg/workflow/maintenance_workflow_test.go b/pkg/workflow/maintenance_workflow_test.go
index 7dc059e8b52..7348cb6bd38 100644
--- a/pkg/workflow/maintenance_workflow_test.go
+++ b/pkg/workflow/maintenance_workflow_test.go
@@ -289,7 +289,7 @@ func TestGenerateMaintenanceWorkflow_OperationJobConditions(t *testing.T) {
const jobSectionSearchRange = 300
const runOpSectionSearchRange = 500
- // Jobs that should be disabled when operation is set
+ // Jobs that should be disabled when any non-dedicated operation is set (cleanup-cache-memory has its own dedicated operation)
disabledJobs := []string{"close-expired-entities:", "compile-workflows:", "secret-validation:", "zizmor-scan:"}
for _, job := range disabledJobs {
// Find the if: condition for each job
From a83d229e2622a14b6ffc463822c26bd82beeeb9b Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Sun, 12 Apr 2026 14:55:05 +0000
Subject: [PATCH 6/7] refactor: use expression builder for maintenance workflow
if conditions
Replace hand-written if expressions for cleanup-cache-memory and
run_operation jobs with composable expression builder functions.
Introduces buildNotForkCondition, buildNotDispatchOrEmptyOperation,
buildNotForkAndScheduledOrOperation, and buildRunOperationCondition
helpers that leverage RenderCondition with optimizer.
Agent-Logs-Url: https://github.com/github/gh-aw/sessions/564047d2-495a-4162-b2e2-6bc4f638dd34
Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com>
---
.github/workflows/agentics-maintenance.yml | 4 +-
pkg/workflow/maintenance_workflow.go | 72 +++++++++++++++++++++-
2 files changed, 72 insertions(+), 4 deletions(-)
diff --git a/.github/workflows/agentics-maintenance.yml b/.github/workflows/agentics-maintenance.yml
index 7df10ecbc42..8c142f3ab1a 100644
--- a/.github/workflows/agentics-maintenance.yml
+++ b/.github/workflows/agentics-maintenance.yml
@@ -109,7 +109,7 @@ jobs:
await main();
cleanup-cache-memory:
- if: ${{ !github.event.repository.fork && (github.event_name != 'workflow_dispatch' || github.event.inputs.operation == '' || github.event.inputs.operation == 'clean_cache_memories') }}
+ if: ${{ (!(github.event.repository.fork)) && (github.event_name != 'workflow_dispatch' || github.event.inputs.operation == '' || github.event.inputs.operation == 'clean_cache_memories') }}
runs-on: ubuntu-slim
permissions:
actions: write
@@ -136,7 +136,7 @@ jobs:
await main();
run_operation:
- if: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.operation != '' && github.event.inputs.operation != 'safe_outputs' && github.event.inputs.operation != 'create_labels' && github.event.inputs.operation != 'clean_cache_memories' && github.event.inputs.operation != 'validate' && !github.event.repository.fork }}
+ if: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.operation != '' && github.event.inputs.operation != 'safe_outputs' && github.event.inputs.operation != 'create_labels' && github.event.inputs.operation != 'clean_cache_memories' && github.event.inputs.operation != 'validate' && (!(github.event.repository.fork)) }}
runs-on: ubuntu-slim
permissions:
actions: write
diff --git a/pkg/workflow/maintenance_workflow.go b/pkg/workflow/maintenance_workflow.go
index f6a32e4ca85..5f582cd2335 100644
--- a/pkg/workflow/maintenance_workflow.go
+++ b/pkg/workflow/maintenance_workflow.go
@@ -347,9 +347,10 @@ jobs:
// Add cleanup-cache-memory job for scheduled runs and clean_cache_memories operation
// This job lists all caches starting with "memory-", groups them by key prefix,
// keeps the latest run ID per group, and deletes the rest.
+ cleanupCacheCondition := buildNotForkAndScheduledOrOperation("clean_cache_memories")
yaml.WriteString(`
cleanup-cache-memory:
- if: ${{ !github.event.repository.fork && (github.event_name != 'workflow_dispatch' || github.event.inputs.operation == '' || github.event.inputs.operation == 'clean_cache_memories') }}
+ if: ${{ ` + RenderCondition(cleanupCacheCondition) + ` }}
runs-on: ` + runsOnValue + `
permissions:
actions: write
@@ -382,9 +383,10 @@ jobs:
`)
// Add unified run_operation job for all dispatch operations except those with dedicated jobs (safe_outputs, create_labels, clean_cache_memories, validate)
+ runOperationCondition := buildRunOperationCondition("safe_outputs", "create_labels", "clean_cache_memories", "validate")
yaml.WriteString(`
run_operation:
- if: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.operation != '' && github.event.inputs.operation != 'safe_outputs' && github.event.inputs.operation != 'create_labels' && github.event.inputs.operation != 'clean_cache_memories' && github.event.inputs.operation != 'validate' && !github.event.repository.fork }}
+ if: ${{ ` + RenderCondition(runOperationCondition) + ` }}
runs-on: ` + runsOnValue + `
permissions:
actions: write
@@ -710,3 +712,69 @@ jobs:
maintenanceLog.Print("Maintenance workflow generated successfully")
return nil
}
+
+// buildNotForkCondition creates a condition to check the repository is not a fork.
+func buildNotForkCondition() ConditionNode {
+ return &NotNode{
+ Child: BuildPropertyAccess("github.event.repository.fork"),
+ }
+}
+
+// buildNotDispatchOrEmptyOperation creates a condition that is true when the event
+// is not a workflow_dispatch or the operation input is empty.
+func buildNotDispatchOrEmptyOperation() ConditionNode {
+ return BuildOr(
+ BuildNotEquals(
+ BuildPropertyAccess("github.event_name"),
+ BuildStringLiteral("workflow_dispatch"),
+ ),
+ BuildEquals(
+ BuildPropertyAccess("github.event.inputs.operation"),
+ BuildStringLiteral(""),
+ ),
+ )
+}
+
+// buildNotForkAndScheduledOrOperation creates a condition for jobs that run on
+// schedule (or empty operation) AND when a specific operation is selected.
+// Condition: !fork && (not_dispatch || operation == โ || operation == op)
+func buildNotForkAndScheduledOrOperation(operation string) ConditionNode {
+ return BuildAnd(
+ buildNotForkCondition(),
+ BuildOr(
+ buildNotDispatchOrEmptyOperation(),
+ BuildEquals(
+ BuildPropertyAccess("github.event.inputs.operation"),
+ BuildStringLiteral(operation),
+ ),
+ ),
+ )
+}
+
+// buildRunOperationCondition creates the condition for the unified run_operation
+// job that handles all dispatch operations except the ones with dedicated jobs.
+// Condition: dispatch && operation != โ && operation != each excluded && !fork.
+func buildRunOperationCondition(excludedOperations ...string) ConditionNode {
+ // Start with: event is workflow_dispatch AND operation is not empty
+ condition := BuildAnd(
+ BuildEventTypeEquals("workflow_dispatch"),
+ BuildNotEquals(
+ BuildPropertyAccess("github.event.inputs.operation"),
+ BuildStringLiteral(""),
+ ),
+ )
+
+ // Exclude each dedicated operation
+ for _, op := range excludedOperations {
+ condition = BuildAnd(
+ condition,
+ BuildNotEquals(
+ BuildPropertyAccess("github.event.inputs.operation"),
+ BuildStringLiteral(op),
+ ),
+ )
+ }
+
+ // AND not a fork
+ return BuildAnd(condition, buildNotForkCondition())
+}
From 600d18533cdffd3107d0668b3688dcd236543f68 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Sun, 12 Apr 2026 15:00:13 +0000
Subject: [PATCH 7/7] fix: correct smart quotes in condition comments
Agent-Logs-Url: https://github.com/github/gh-aw/sessions/564047d2-495a-4162-b2e2-6bc4f638dd34
Co-authored-by: pelikhan <4175913+pelikhan@users.noreply.github.com>
---
pkg/workflow/maintenance_workflow.go | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pkg/workflow/maintenance_workflow.go b/pkg/workflow/maintenance_workflow.go
index 5f582cd2335..c7c2b1fb2ee 100644
--- a/pkg/workflow/maintenance_workflow.go
+++ b/pkg/workflow/maintenance_workflow.go
@@ -737,7 +737,7 @@ func buildNotDispatchOrEmptyOperation() ConditionNode {
// buildNotForkAndScheduledOrOperation creates a condition for jobs that run on
// schedule (or empty operation) AND when a specific operation is selected.
-// Condition: !fork && (not_dispatch || operation == โ || operation == op)
+// Condition: !fork && (not_dispatch || operation == '' || operation == op)
func buildNotForkAndScheduledOrOperation(operation string) ConditionNode {
return BuildAnd(
buildNotForkCondition(),
@@ -753,7 +753,7 @@ func buildNotForkAndScheduledOrOperation(operation string) ConditionNode {
// buildRunOperationCondition creates the condition for the unified run_operation
// job that handles all dispatch operations except the ones with dedicated jobs.
-// Condition: dispatch && operation != โ && operation != each excluded && !fork.
+// Condition: dispatch && operation != '' && operation != each excluded && !fork.
func buildRunOperationCondition(excludedOperations ...string) ConditionNode {
// Start with: event is workflow_dispatch AND operation is not empty
condition := BuildAnd(