diff --git a/.github/workflows/contribution-check.lock.yml b/.github/workflows/contribution-check.lock.yml index 837c3c90d5d..ac9a27254bc 100644 --- a/.github/workflows/contribution-check.lock.yml +++ b/.github/workflows/contribution-check.lock.yml @@ -21,7 +21,7 @@ # For more information: https://github.github.com/gh-aw/introduction/overview/ # # -# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"f065feb7ad239a734f6def6a44417a260407d25d6d0fb58e829e44ca32ef065c","strict":true,"agent_id":"copilot"} +# gh-aw-metadata: {"schema_version":"v3","frontmatter_hash":"c6a3f4c1233714c024d91fc9b1c524d86770132bae2044a11ce34b31aeaaa870","strict":true,"agent_id":"copilot"} name: "Contribution Check" "on": @@ -349,7 +349,7 @@ jobs: mkdir -p /tmp/gh-aw/safeoutputs mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs cat > ${RUNNER_TEMP}/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_EOF' - {"add_comment":{"hide_older_comments":true,"max":10,"target":"*","target-repo":"${{ vars.TARGET_REPOSITORY }}"},"add_labels":{"allowed":["spam","needs-work","outdated","lgtm"],"max":4,"target":"*","target-repo":"${{ vars.TARGET_REPOSITORY }}"},"create_issue":{"close_older_issues":true,"expires":24,"labels":["contribution-report"],"max":1,"title_prefix":"[Contribution Check Report]"},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"}} + {"add_comment":{"hide_older_comments":true,"max":10,"target":"*","target-repo":"${{ vars.TARGET_REPOSITORY }}"},"add_labels":{"allowed":["spam","needs-work","outdated","lgtm"],"max":4,"target":"*","target-repo":"${{ vars.TARGET_REPOSITORY }}"},"create_issue":{"close_older_issues":true,"expires":24,"group_by_day":true,"labels":["contribution-report"],"max":1,"title_prefix":"[Contribution Check Report]"},"missing_data":{},"missing_tool":{},"noop":{"max":1,"report-as-issue":"true"}} GH_AW_SAFE_OUTPUTS_CONFIG_EOF - name: Write Safe Outputs Tools run: | @@ -1102,7 +1102,7 @@ jobs: GH_AW_ALLOWED_DOMAINS: 
"api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,telemetry.enterprise.githubcopilot.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com,www.googleapis.com" GITHUB_SERVER_URL: ${{ github.server_url }} GITHUB_API_URL: ${{ github.api_url }} - GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"add_comment\":{\"hide_older_comments\":true,\"max\":10,\"target\":\"*\",\"target-repo\":\"${{ vars.TARGET_REPOSITORY }}\"},\"add_labels\":{\"allowed\":[\"spam\",\"needs-work\",\"outdated\",\"lgtm\"],\"max\":4,\"target\":\"*\",\"target-repo\":\"${{ vars.TARGET_REPOSITORY }}\"},\"create_issue\":{\"close_older_issues\":true,\"expires\":24,\"labels\":[\"contribution-report\"],\"max\":1,\"title_prefix\":\"[Contribution Check Report]\"},\"missing_data\":{},\"missing_tool\":{},\"noop\":{\"max\":1,\"report-as-issue\":\"true\"}}" + GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"add_comment\":{\"hide_older_comments\":true,\"max\":10,\"target\":\"*\",\"target-repo\":\"${{ vars.TARGET_REPOSITORY }}\"},\"add_labels\":{\"allowed\":[\"spam\",\"needs-work\",\"outdated\",\"lgtm\"],\"max\":4,\"target\":\"*\",\"target-repo\":\"${{ vars.TARGET_REPOSITORY 
}}\"},\"create_issue\":{\"close_older_issues\":true,\"expires\":24,\"group_by_day\":true,\"labels\":[\"contribution-report\"],\"max\":1,\"title_prefix\":\"[Contribution Check Report]\"},\"missing_data\":{},\"missing_tool\":{},\"noop\":{\"max\":1,\"report-as-issue\":\"true\"}}" with: github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} script: | diff --git a/.github/workflows/contribution-check.md b/.github/workflows/contribution-check.md index d0133c81299..708bee21c68 100644 --- a/.github/workflows/contribution-check.md +++ b/.github/workflows/contribution-check.md @@ -23,6 +23,7 @@ safe-outputs: labels: - contribution-report close-older-issues: true + group-by-day: true expires: 1d add-labels: allowed: [spam, needs-work, outdated, lgtm] diff --git a/actions/setup/js/close_older_issues.cjs b/actions/setup/js/close_older_issues.cjs index e630caf5541..d225684df54 100644 --- a/actions/setup/js/close_older_issues.cjs +++ b/actions/setup/js/close_older_issues.cjs @@ -29,7 +29,7 @@ const API_DELAY_MS = 500; * @param {string} [closeOlderKey] - Optional explicit deduplication key. When set, the * `gh-aw-close-key` marker is used as the primary search term and exact filter instead * of the workflow-id / workflow-call-id markers. 
- * @returns {Promise<Array<{number: number, title: string, html_url: string, labels: any[]}>>} Matching issues + * @returns {Promise<Array<{number: number, title: string, html_url: string, labels: any[], created_at: string}>>} Matching issues */ async function searchOlderIssues(github, owner, repo, workflowId, excludeNumber, callerWorkflowId, closeOlderKey) { core.info(`Starting search for older issues in ${owner}/${repo}`); @@ -121,6 +121,7 @@ async function searchOlderIssues(github, owner, repo, workflowId, excludeNumber, title: item.title, html_url: item.html_url, labels: item.labels || [], + created_at: item.created_at, })); core.info(`Filtering complete:`); diff --git a/actions/setup/js/create_issue.cjs b/actions/setup/js/create_issue.cjs index c3c8f7ff4b3..72897d5d2c1 100644 --- a/actions/setup/js/create_issue.cjs +++ b/actions/setup/js/create_issue.cjs @@ -40,7 +40,7 @@ const { ERR_VALIDATION } = require("./error_codes.cjs"); const { renderTemplateFromFile } = require("./messages_core.cjs"); const { createExpirationLine, addExpirationToFooter } = require("./ephemerals.cjs"); const { MAX_SUB_ISSUES, getSubIssueCount } = require("./sub_issue_helpers.cjs"); -const { closeOlderIssues } = require("./close_older_issues.cjs"); +const { closeOlderIssues, searchOlderIssues, addIssueComment } = require("./close_older_issues.cjs"); const { parseBoolTemplatable } = require("./templatable.cjs"); const { tryEnforceArrayLimit } = require("./limit_enforcement_helpers.cjs"); const { logStagedPreviewInfo } = require("./staged_preview.cjs"); @@ -205,6 +205,7 @@ async function main(config = {}) { const { defaultTargetRepo, allowedRepos } = resolveTargetRepoConfig(config); const groupEnabled = parseBoolTemplatable(config.group, false); const closeOlderIssuesEnabled = parseBoolTemplatable(config.close_older_issues, false); + const groupByDayEnabled = parseBoolTemplatable(config.group_by_day, false); const rawCloseOlderKey = config.close_older_key ? String(config.close_older_key) : ""; const closeOlderKey = rawCloseOlderKey ? 
normalizeCloseOlderKey(rawCloseOlderKey) : ""; if (rawCloseOlderKey && !closeOlderKey) { @@ -248,6 +249,12 @@ async function main(config = {}) { core.info(` Using explicit close-older-key: "${closeOlderKey}"`); } } + if (groupByDayEnabled) { + core.info(`Group-by-day mode enabled: if an open issue was already created today, new content will be posted as a comment`); + if (!closeOlderKey && !process.env.GH_AW_WORKFLOW_ID) { + core.warning(`Group-by-day mode has no effect: neither close-older-key nor GH_AW_WORKFLOW_ID is set — issues cannot be searched`); + } + } // Track how many items we've processed for max limit let processedCount = 0; @@ -283,8 +290,6 @@ async function main(config = {}) { }; } - processedCount++; - // Merge external resolved temp IDs with our local map if (resolvedTemporaryIds) { for (const [tempId, resolved] of Object.entries(resolvedTemporaryIds)) { @@ -480,6 +485,49 @@ async function main(config = {}) { bodyLines.push(""); const body = bodyLines.join("\n").trim(); + // Group-by-day check: if enabled, search for an existing open issue created today. + // When found, post the new content as a comment on the existing issue instead of + // creating a duplicate. This groups multiple same-day runs into a single issue. + // The max-count slot is NOT consumed when posting as a comment (processedCount is + // only incremented below, just before actual issue creation). + if (groupByDayEnabled && (closeOlderKey || workflowId)) { + const today = new Date().toISOString().split("T")[0]; // YYYY-MM-DD (UTC) + try { + const existingIssues = await searchOlderIssues( + githubClient, + repoParts.owner, + repoParts.repo, + workflowId, + 0, // no issue to exclude — this is a pre-creation check + callerWorkflowId, + closeOlderKey + ); + const todayIssue = existingIssues.find(issue => { + const createdDate = issue.created_at ? 
String(issue.created_at).split("T")[0] : ""; + return createdDate === today; + }); + if (todayIssue) { + core.info(`Group-by-day: found open issue #${todayIssue.number} created today (${today}) — posting new content as a comment`); + const comment = await addIssueComment(githubClient, repoParts.owner, repoParts.repo, todayIssue.number, body); + core.info(`Posted content as comment ${comment.html_url} on issue #${todayIssue.number}`); + return { + success: true, + grouped: true, + existingIssueNumber: todayIssue.number, + existingIssueUrl: todayIssue.html_url, + commentUrl: comment.html_url, + }; + } + } catch (error) { + // Log but do not abort — fall through to normal creation + core.warning(`Group-by-day pre-check failed: ${getErrorMessage(error)} — proceeding with issue creation`); + } + } + + // Increment processed count only when we are about to create an issue + // (group-by-day comment paths return above without consuming a slot) + processedCount++; + core.info(`Creating issue in ${qualifiedItemRepo} with title: ${title}`); core.info(`Labels: ${labels.join(", ")}`); if (assignees.length > 0) { diff --git a/actions/setup/js/create_issue.test.cjs b/actions/setup/js/create_issue.test.cjs index 2c2d2069233..47d85786042 100644 --- a/actions/setup/js/create_issue.test.cjs +++ b/actions/setup/js/create_issue.test.cjs @@ -29,7 +29,12 @@ describe("create_issue", () => { title: "Test Issue", }, }), - createComment: vi.fn().mockResolvedValue({}), + createComment: vi.fn().mockResolvedValue({ + data: { + id: 456, + html_url: "https://github.com/owner/repo/issues/99#issuecomment-456", + }, + }), }, search: { issuesAndPullRequests: vi.fn().mockResolvedValue({ @@ -466,4 +471,148 @@ describe("create_issue", () => { expect(result.error).toContain("received 6"); }); }); + + describe("group-by-day mode", () => { + it("should post new content as a comment if an open issue was already created today", async () => { + const today = new Date().toISOString().split("T")[0]; + 
mockGithub.rest.search.issuesAndPullRequests.mockResolvedValueOnce({ + data: { + total_count: 1, + items: [ + { + number: 99, + title: "[Contribution Check Report] Contribution Check", + html_url: "https://github.com/test-owner/test-repo/issues/99", + body: "", + created_at: `${today}T10:00:00Z`, + state: "open", + pull_request: undefined, + }, + ], + }, + }); + + const handler = await main({ group_by_day: true, close_older_issues: true }); + const result = await handler({ title: "Test Issue", body: "Test body" }); + + expect(result.success).toBe(true); + expect(result.grouped).toBe(true); + expect(result.existingIssueNumber).toBe(99); + expect(mockGithub.rest.issues.create).not.toHaveBeenCalled(); + expect(mockGithub.rest.issues.createComment).toHaveBeenCalledWith(expect.objectContaining({ issue_number: 99 })); + }); + + it("should create issue if no open issue was created today", async () => { + const yesterday = new Date(Date.now() - 86400000).toISOString().split("T")[0]; + mockGithub.rest.search.issuesAndPullRequests.mockResolvedValueOnce({ + data: { + total_count: 1, + items: [ + { + number: 50, + title: "[Contribution Check Report] Contribution Check", + html_url: "https://github.com/test-owner/test-repo/issues/50", + body: "", + created_at: `${yesterday}T10:00:00Z`, + state: "open", + pull_request: undefined, + }, + ], + }, + }); + + const handler = await main({ group_by_day: true, close_older_issues: true }); + const result = await handler({ title: "Test Issue", body: "Test body" }); + + expect(result.success).toBe(true); + expect(result.grouped).toBeUndefined(); + expect(mockGithub.rest.issues.create).toHaveBeenCalledOnce(); + }); + + it("should create issue if no existing issues are found", async () => { + mockGithub.rest.search.issuesAndPullRequests.mockResolvedValueOnce({ + data: { total_count: 0, items: [] }, + }); + + const handler = await main({ group_by_day: true, close_older_issues: true }); + const result = await handler({ title: "Test Issue", 
body: "Test body" }); + + expect(result.success).toBe(true); + expect(result.grouped).toBeUndefined(); + expect(mockGithub.rest.issues.create).toHaveBeenCalledOnce(); + }); + + it("should proceed with creation if group-by-day pre-check throws", async () => { + mockGithub.rest.search.issuesAndPullRequests.mockRejectedValueOnce(new Error("Search API error")); + + const handler = await main({ group_by_day: true, close_older_issues: true }); + const result = await handler({ title: "Test Issue", body: "Test body" }); + + expect(result.success).toBe(true); + expect(result.grouped).toBeUndefined(); + expect(mockGithub.rest.issues.create).toHaveBeenCalledOnce(); + expect(mockCore.warning).toHaveBeenCalledWith(expect.stringContaining("Group-by-day pre-check failed")); + }); + + it("should not group if group-by-day is false even with today's issue", async () => { + const today = new Date().toISOString().split("T")[0]; + mockGithub.rest.search.issuesAndPullRequests.mockResolvedValue({ + data: { + total_count: 1, + items: [ + { + number: 77, + title: "Existing Issue", + html_url: "https://github.com/test-owner/test-repo/issues/77", + body: "", + created_at: `${today}T10:00:00Z`, + state: "open", + pull_request: undefined, + }, + ], + }, + }); + + // group_by_day is false (default) — creation should NOT be grouped + const handler = await main({ close_older_issues: false }); + const result = await handler({ title: "Test Issue", body: "Test body" }); + + expect(result.success).toBe(true); + expect(result.grouped).toBeUndefined(); + expect(mockGithub.rest.issues.create).toHaveBeenCalledOnce(); + }); + + it("should not consume max count slot when grouped", async () => { + const today = new Date().toISOString().split("T")[0]; + mockGithub.rest.search.issuesAndPullRequests.mockResolvedValue({ + data: { + total_count: 1, + items: [ + { + number: 88, + title: "Existing Issue", + html_url: "https://github.com/test-owner/test-repo/issues/88", + body: "", + created_at: 
`${today}T10:00:00Z`, + state: "open", + pull_request: undefined, + }, + ], + }, + }); + + const handler = await main({ group_by_day: true, close_older_issues: true, max: 1 }); + + // First call is grouped — max slot should not be consumed + const result1 = await handler({ title: "First Issue", body: "Body" }); + expect(result1.grouped).toBe(true); + + // Second call also finds today's issue — also grouped + const result2 = await handler({ title: "Second Issue", body: "Body" }); + expect(result2.grouped).toBe(true); + + // Neither call should have created an issue + expect(mockGithub.rest.issues.create).not.toHaveBeenCalled(); + }); + }); }); diff --git a/docs/src/content/docs/reference/frontmatter-full.md b/docs/src/content/docs/reference/frontmatter-full.md index 82805bf870a..54ade376735 100644 --- a/docs/src/content/docs/reference/frontmatter-full.md +++ b/docs/src/content/docs/reference/frontmatter-full.md @@ -2504,6 +2504,14 @@ safe-outputs: # (optional) close-older-key: "example-value" + # When true, if an open issue with the same close-older-key (or workflow-id marker + # when no key is set) was already created today (UTC), post the new content as a + # comment on that existing issue instead of creating a new one. Groups multiple + # same-day runs into a single issue. Works best when combined with + # close-older-issues: true. + # (optional) + group-by-day: true + # Controls whether AI-generated footer is added to the issue. When false, the # visible footer content is omitted but XML markers (workflow-id, tracker-id, # metadata) are still included for searchability. Defaults to true. 
diff --git a/docs/src/content/docs/reference/safe-outputs.md b/docs/src/content/docs/reference/safe-outputs.md index 9f5b86467d7..9cf7c6eabb7 100644 --- a/docs/src/content/docs/reference/safe-outputs.md +++ b/docs/src/content/docs/reference/safe-outputs.md @@ -161,6 +161,26 @@ When enabled: - Maximum 10 older issues will be closed - Only runs if the new issue creation succeeds +#### Group By Day + +The `group-by-day` field (default: `false`) groups multiple same-day workflow runs into a single issue. When enabled, the handler searches for an existing open issue created **today (UTC)** with the same workflow-id marker (or `close-older-key` if set). If found, the new content is posted as a **comment** on that existing issue instead of creating a new one. + +```yaml wrap +safe-outputs: + create-issue: + title-prefix: "[Contribution Check Report]" + labels: [report] + close-older-issues: true + group-by-day: true +``` + +This is useful for scheduled workflows (e.g. every 4 hours) that produce recurring daily reports: all runs on the same day contribute to one issue, eliminating duplicate open/closed issues. 
+ +- Performs a pre-creation search for open issues matching the workflow-id or `close-older-key` +- If a matching issue was created today (UTC), new content is posted as a comment on it +- The max-count slot is not consumed when posting as a comment +- On failure of the pre-check, normal issue creation proceeds as a fallback + #### Searching for Workflow-Created Items All items created by workflows (issues, pull requests, discussions, and comments) include a hidden **workflow-id marker** in their body: diff --git a/pkg/parser/schemas/main_workflow_schema.json b/pkg/parser/schemas/main_workflow_schema.json index 35076622bda..6fec138631d 100644 --- a/pkg/parser/schemas/main_workflow_schema.json +++ b/pkg/parser/schemas/main_workflow_schema.json @@ -4516,6 +4516,11 @@ "minLength": 1, "pattern": "\\S" }, + "group-by-day": { + "type": "boolean", + "description": "When true, if an open issue with the same close-older-key (or workflow-id marker when no key is set) was already created today (UTC), post the new content as a comment on that existing issue instead of creating a new one. Groups multiple same-day runs into a single issue. Works best when combined with close-older-issues: true.", + "default": false + }, "footer": { "type": "boolean", "description": "Controls whether AI-generated footer is added to the issue. When false, the visible footer content is omitted but XML markers (workflow-id, tracker-id, metadata) are still included for searchability. Defaults to true.", diff --git a/pkg/workflow/compiler_safe_outputs_config.go b/pkg/workflow/compiler_safe_outputs_config.go index 16005d8f4e4..57094cf7b6a 100644 --- a/pkg/workflow/compiler_safe_outputs_config.go +++ b/pkg/workflow/compiler_safe_outputs_config.go @@ -163,6 +163,7 @@ var handlerRegistry = map[string]handlerBuilder{ AddTemplatableBool("group", c.Group). AddTemplatableBool("close_older_issues", c.CloseOlderIssues). AddIfNotEmpty("close_older_key", c.CloseOlderKey). 
+ AddTemplatableBool("group_by_day", c.GroupByDay). AddTemplatableBool("footer", getEffectiveFooterForTemplatable(c.Footer, cfg.Footer)). AddIfNotEmpty("github-token", c.GitHubToken). AddIfTrue("staged", c.Staged). diff --git a/pkg/workflow/create_issue.go b/pkg/workflow/create_issue.go index e70e5f90d44..87be1954f19 100644 --- a/pkg/workflow/create_issue.go +++ b/pkg/workflow/create_issue.go @@ -19,6 +19,7 @@ type CreateIssuesConfig struct { AllowedRepos []string `yaml:"allowed-repos,omitempty"` // List of additional repositories that issues can be created in CloseOlderIssues *string `yaml:"close-older-issues,omitempty"` // When true, close older issues with same title prefix or labels as "not planned" CloseOlderKey string `yaml:"close-older-key,omitempty"` // Optional explicit deduplication key for close-older matching. When set, uses gh-aw-close-key marker instead of workflow-id markers. + GroupByDay *string `yaml:"group-by-day,omitempty"` // When true, if an open issue was already created today (UTC), post new content as a comment on it instead of creating a duplicate. Works best with close-older-issues: true. Expires int `yaml:"expires,omitempty"` // Hours until the issue expires and should be automatically closed Group *string `yaml:"group,omitempty"` // If true, group issues as sub-issues under a parent issue (workflow ID is used as group identifier) Footer *string `yaml:"footer,omitempty"` // Controls whether AI-generated footer is added. When false, visible footer is omitted but XML markers are kept. @@ -41,7 +42,7 @@ func (c *Compiler) parseIssuesConfig(outputMap map[string]any) *CreateIssuesConf // Pre-process templatable bool fields: convert literal booleans to strings so that // GitHub Actions expression strings (e.g. "${{ inputs.close-older-issues }}") are also accepted. 
- for _, field := range []string{"close-older-issues", "group", "footer"} { + for _, field := range []string{"close-older-issues", "group", "footer", "group-by-day"} { if err := preprocessBoolFieldAsString(configData, field, createIssueLog); err != nil { createIssueLog.Printf("Invalid %s value: %v", field, err) return nil