diff --git a/.github/workflows/smoke-claude.lock.yml b/.github/workflows/smoke-claude.lock.yml index 0e4cf030121..2b596997cf2 100644 --- a/.github/workflows/smoke-claude.lock.yml +++ b/.github/workflows/smoke-claude.lock.yml @@ -208,7 +208,7 @@ jobs: mkdir -p /tmp/gh-aw/safeoutputs mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs cat > /opt/gh-aw/safeoutputs/config.json << 'EOF' - {"add_comment":{"max":1},"add_labels":{"allowed":["smoke-claude"],"max":3},"create_issue":{"max":1},"missing_data":{},"missing_tool":{},"noop":{"max":1}} + {"add_comment":{"max":1},"add_labels":{"allowed":["smoke-claude"],"max":3},"create_issue":{"group":true,"max":1},"missing_data":{},"missing_tool":{},"noop":{"max":1}} EOF cat > /opt/gh-aw/safeoutputs/tools.json << 'EOF' [ @@ -742,6 +742,7 @@ jobs: GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} + GH_AW_GITHUB_SERVER_URL: ${{ github.server_url }} GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }} run: | bash /opt/gh-aw/actions/create_prompt_first.sh @@ -962,10 +963,18 @@ jobs: ## Output - Add a **very brief** comment (max 5-10 lines) to the current pull request with: - - PR titles only (no descriptions) - - ✅ or ❌ for each test result - - Overall status: PASS or FAIL + 1. **Create an issue** with a summary of the smoke test run: + - Title: "Smoke Test: Claude - __GH_AW_GITHUB_RUN_ID__" + - Body should include: + - Test results (✅ or ❌ for each test) + - Overall status: PASS or FAIL + - Run URL: __GH_AW_GITHUB_SERVER_URL__/__GH_AW_GITHUB_REPOSITORY__/actions/runs/__GH_AW_GITHUB_RUN_ID__ + - Timestamp + + 2. Add a **very brief** comment (max 5-10 lines) to the current pull request with: + - PR titles only (no descriptions) + - ✅ or ❌ for each test result + - Overall status: PASS or FAIL If all tests pass, add the label `smoke-claude` to the pull request. 
@@ -981,6 +990,7 @@ jobs: GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} + GH_AW_GITHUB_SERVER_URL: ${{ github.server_url }} GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }} with: script: | @@ -997,6 +1007,7 @@ jobs: GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: process.env.GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER, GH_AW_GITHUB_REPOSITORY: process.env.GH_AW_GITHUB_REPOSITORY, GH_AW_GITHUB_RUN_ID: process.env.GH_AW_GITHUB_RUN_ID, + GH_AW_GITHUB_SERVER_URL: process.env.GH_AW_GITHUB_SERVER_URL, GH_AW_GITHUB_WORKSPACE: process.env.GH_AW_GITHUB_WORKSPACE } }); @@ -1006,6 +1017,7 @@ jobs: GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} + GH_AW_GITHUB_SERVER_URL: ${{ github.server_url }} with: script: | const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); @@ -1626,7 +1638,7 @@ jobs: uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 env: GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} - GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"add_comment\":{\"hide_older_comments\":true,\"max\":1},\"add_labels\":{\"allowed\":[\"smoke-claude\"]},\"create_issue\":{\"expires\":2,\"max\":1},\"missing_data\":{},\"missing_tool\":{}}" + GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"add_comment\":{\"hide_older_comments\":true,\"max\":1},\"add_labels\":{\"allowed\":[\"smoke-claude\"]},\"create_issue\":{\"expires\":2,\"group\":true,\"max\":1},\"missing_data\":{},\"missing_tool\":{}}" with: github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} script: | diff --git a/.github/workflows/smoke-claude.md b/.github/workflows/smoke-claude.md index 6c35a4eacdd..0cf82592ae6 100644 --- a/.github/workflows/smoke-claude.md +++ b/.github/workflows/smoke-claude.md @@ -45,6 +45,7 @@ safe-outputs: hide-older-comments: true create-issue: expires: 2h + 
group: true add-labels: allowed: [smoke-claude] messages: @@ -70,9 +71,17 @@ timeout-minutes: 10 ## Output -Add a **very brief** comment (max 5-10 lines) to the current pull request with: -- PR titles only (no descriptions) -- ✅ or ❌ for each test result -- Overall status: PASS or FAIL +1. **Create an issue** with a summary of the smoke test run: + - Title: "Smoke Test: Claude - ${{ github.run_id }}" + - Body should include: + - Test results (✅ or ❌ for each test) + - Overall status: PASS or FAIL + - Run URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + - Timestamp + +2. Add a **very brief** comment (max 5-10 lines) to the current pull request with: + - PR titles only (no descriptions) + - ✅ or ❌ for each test result + - Overall status: PASS or FAIL If all tests pass, add the label `smoke-claude` to the pull request. diff --git a/.github/workflows/smoke-copilot.lock.yml b/.github/workflows/smoke-copilot.lock.yml index 9c86c3b254f..ebc18627030 100644 --- a/.github/workflows/smoke-copilot.lock.yml +++ b/.github/workflows/smoke-copilot.lock.yml @@ -233,7 +233,7 @@ jobs: mkdir -p /tmp/gh-aw/safeoutputs mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs cat > /opt/gh-aw/safeoutputs/config.json << 'EOF' - {"add_comment":{"max":1},"add_labels":{"allowed":["smoke-copilot"],"max":3},"create_issue":{"max":1},"missing_data":{},"missing_tool":{},"noop":{"max":1}} + {"add_comment":{"max":1},"add_labels":{"allowed":["smoke-copilot"],"max":3},"create_issue":{"group":true,"max":1},"missing_data":{},"missing_tool":{},"noop":{"max":1}} EOF cat > /opt/gh-aw/safeoutputs/tools.json << 'EOF' [ @@ -645,6 +645,7 @@ jobs: GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} + GH_AW_GITHUB_SERVER_URL: ${{ github.server_url }} GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }} run: | bash /opt/gh-aw/actions/create_prompt_first.sh @@ -734,11 
+735,20 @@ jobs: ## Output - Add a **very brief** comment (max 5-10 lines) to the current pull request with: - - PR titles only (no descriptions) - - ✅ or ❌ for each test result - - Overall status: PASS or FAIL - - Mention the pull request author and any assignees + 1. **Create an issue** with a summary of the smoke test run: + - Title: "Smoke Test: Copilot - __GH_AW_GITHUB_RUN_ID__" + - Body should include: + - Test results (✅ or ❌ for each test) + - Overall status: PASS or FAIL + - Run URL: __GH_AW_GITHUB_SERVER_URL__/__GH_AW_GITHUB_REPOSITORY__/actions/runs/__GH_AW_GITHUB_RUN_ID__ + - Timestamp + - Pull request author and assignees + + 2. Add a **very brief** comment (max 5-10 lines) to the current pull request with: + - PR titles only (no descriptions) + - ✅ or ❌ for each test result + - Overall status: PASS or FAIL + - Mention the pull request author and any assignees If all tests pass, add the label `smoke-copilot` to the pull request. @@ -754,6 +764,7 @@ jobs: GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} + GH_AW_GITHUB_SERVER_URL: ${{ github.server_url }} GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }} with: script: | @@ -770,6 +781,7 @@ jobs: GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: process.env.GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER, GH_AW_GITHUB_REPOSITORY: process.env.GH_AW_GITHUB_REPOSITORY, GH_AW_GITHUB_RUN_ID: process.env.GH_AW_GITHUB_RUN_ID, + GH_AW_GITHUB_SERVER_URL: process.env.GH_AW_GITHUB_SERVER_URL, GH_AW_GITHUB_WORKSPACE: process.env.GH_AW_GITHUB_WORKSPACE } }); @@ -779,6 +791,7 @@ jobs: GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} + GH_AW_GITHUB_SERVER_URL: ${{ github.server_url }} with: script: | const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); @@ -1310,7 +1323,7 @@ jobs: uses: 
actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0 env: GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} - GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"add_comment\":{\"hide_older_comments\":true,\"max\":1},\"add_labels\":{\"allowed\":[\"smoke-copilot\"]},\"create_issue\":{\"expires\":2,\"max\":1},\"missing_data\":{},\"missing_tool\":{}}" + GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"add_comment\":{\"hide_older_comments\":true,\"max\":1},\"add_labels\":{\"allowed\":[\"smoke-copilot\"]},\"create_issue\":{\"expires\":2,\"group\":true,\"max\":1},\"missing_data\":{},\"missing_tool\":{}}" with: github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} script: | diff --git a/.github/workflows/smoke-copilot.md b/.github/workflows/smoke-copilot.md index a7b6a998b34..260e7e0a5a7 100644 --- a/.github/workflows/smoke-copilot.md +++ b/.github/workflows/smoke-copilot.md @@ -42,6 +42,7 @@ safe-outputs: hide-older-comments: true create-issue: expires: 2h + group: true add-labels: allowed: [smoke-copilot] messages: @@ -67,10 +68,19 @@ strict: true ## Output -Add a **very brief** comment (max 5-10 lines) to the current pull request with: -- PR titles only (no descriptions) -- ✅ or ❌ for each test result -- Overall status: PASS or FAIL -- Mention the pull request author and any assignees +1. **Create an issue** with a summary of the smoke test run: + - Title: "Smoke Test: Copilot - ${{ github.run_id }}" + - Body should include: + - Test results (✅ or ❌ for each test) + - Overall status: PASS or FAIL + - Run URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + - Timestamp + - Pull request author and assignees + +2. Add a **very brief** comment (max 5-10 lines) to the current pull request with: + - PR titles only (no descriptions) + - ✅ or ❌ for each test result + - Overall status: PASS or FAIL + - Mention the pull request author and any assignees If all tests pass, add the label `smoke-copilot` to the pull request. 
diff --git a/actions/setup/js/create_issue.cjs b/actions/setup/js/create_issue.cjs index abbeaa9badb..6932a59f89d 100644 --- a/actions/setup/js/create_issue.cjs +++ b/actions/setup/js/create_issue.cjs @@ -8,6 +8,8 @@ const { generateTemporaryId, isTemporaryId, normalizeTemporaryId, replaceTempora const { parseAllowedRepos, getDefaultTargetRepo, validateRepo, parseRepoSlug } = require("./repo_helpers.cjs"); const { removeDuplicateTitleFromDescription } = require("./remove_duplicate_title.cjs"); const { getErrorMessage } = require("./error_helpers.cjs"); +const { renderTemplate } = require("./messages_core.cjs"); +const fs = require("fs"); /** * @typedef {import('./types/handler-factory').HandlerFactoryFunction} HandlerFactoryFunction @@ -16,6 +18,166 @@ const { getErrorMessage } = require("./error_helpers.cjs"); /** @type {string} Safe output type handled by this module */ const HANDLER_TYPE = "create_issue"; +/** @type {number} Maximum number of sub-issues allowed per parent issue */ +const MAX_SUB_ISSUES_PER_PARENT = 64; + +/** @type {number} Maximum number of parent issues to check when searching */ +const MAX_PARENT_ISSUES_TO_CHECK = 10; + +/** + * Searches for an existing parent issue that can accept more sub-issues + * @param {string} owner - Repository owner + * @param {string} repo - Repository name + * @param {string} markerComment - The HTML comment marker to search for + * @returns {Promise} - Parent issue number or null if none found + */ +async function searchForExistingParent(owner, repo, markerComment) { + try { + const searchQuery = `repo:${owner}/${repo} is:issue "${markerComment}" in:body`; + const searchResults = await github.rest.search.issuesAndPullRequests({ + q: searchQuery, + per_page: MAX_PARENT_ISSUES_TO_CHECK, + sort: "created", + order: "desc", + }); + + if (searchResults.data.total_count === 0) { + return null; + } + + // Check each found issue to see if it can accept more sub-issues + for (const issue of searchResults.data.items) { + 
core.info(`Found potential parent issue #${issue.number}: ${issue.title}`); + + if (issue.state !== "open") { + core.info(`Parent issue #${issue.number} is ${issue.state}, skipping`); + continue; + } + + const subIssueCount = await getSubIssueCount(owner, repo, issue.number); + if (subIssueCount === null) { + continue; // Skip if we couldn't get the count + } + + if (subIssueCount < MAX_SUB_ISSUES_PER_PARENT) { + core.info(`Using existing parent issue #${issue.number} (has ${subIssueCount}/${MAX_SUB_ISSUES_PER_PARENT} sub-issues)`); + return issue.number; + } + + core.info(`Parent issue #${issue.number} is full (${subIssueCount}/${MAX_SUB_ISSUES_PER_PARENT} sub-issues), skipping`); + } + + return null; + } catch (error) { + core.warning(`Could not search for existing parent issues: ${getErrorMessage(error)}`); + return null; + } +} + +/** + * Gets the sub-issue count for a parent issue using GraphQL + * @param {string} owner - Repository owner + * @param {string} repo - Repository name + * @param {number} issueNumber - Issue number + * @returns {Promise} - Sub-issue count or null if query failed + */ +async function getSubIssueCount(owner, repo, issueNumber) { + try { + const subIssueQuery = ` + query($owner: String!, $repo: String!, $issueNumber: Int!) 
{ + repository(owner: $owner, name: $repo) { + issue(number: $issueNumber) { + subIssues(first: 65) { + totalCount + } + } + } + } + `; + + const result = await github.graphql(subIssueQuery, { + owner, + repo, + issueNumber, + }); + + return result?.repository?.issue?.subIssues?.totalCount || 0; + } catch (error) { + core.warning(`Could not check sub-issue count for #${issueNumber}: ${getErrorMessage(error)}`); + return null; + } +} + +/** + * Finds an existing parent issue for a group, or creates a new one if needed + * @param {object} params - Parameters for finding/creating parent issue + * @param {string} params.groupId - The group identifier + * @param {string} params.owner - Repository owner + * @param {string} params.repo - Repository name + * @param {string} params.titlePrefix - Title prefix to use + * @param {string[]} params.labels - Labels to apply to parent issue + * @param {string} params.workflowName - Workflow name + * @param {string} params.workflowSourceURL - URL to the workflow source + * @returns {Promise<number|null>} - Parent issue number or null if creation failed + */ +async function findOrCreateParentIssue({ groupId, owner, repo, titlePrefix, labels, workflowName, workflowSourceURL }) { + const markerComment = `<!-- gh-aw-issue-group: ${groupId} -->`; + + // Search for existing parent issue with the group marker + core.info(`Searching for existing parent issue for group: ${groupId}`); + const existingParent = await searchForExistingParent(owner, repo, markerComment); + if (existingParent) { + return existingParent; + } + + // No suitable parent issue found, create a new one + core.info(`Creating new parent issue for group: ${groupId}`); + try { + const template = createParentIssueTemplate(groupId, titlePrefix, workflowName, workflowSourceURL); + const { data: parentIssue } = await github.rest.issues.create({ + owner, + repo, + title: template.title, + body: template.body, + labels: labels, + }); + + core.info(`Created new parent issue #${parentIssue.number}: ${parentIssue.html_url}`); + 
return parentIssue.number; + } catch (error) { + core.error(`Failed to create parent issue: ${getErrorMessage(error)}`); + return null; + } +} + +/** + * Creates a parent issue template for grouping sub-issues + * @param {string} groupId - The group identifier (workflow ID) + * @param {string} titlePrefix - Title prefix to use + * @param {string} workflowName - Name of the workflow + * @param {string} workflowSourceURL - URL to the workflow source + * @returns {object} - Template with title and body + */ +function createParentIssueTemplate(groupId, titlePrefix, workflowName, workflowSourceURL) { + const title = `${titlePrefix}${groupId} - Issue Group`; + + // Load issue template + const issueTemplatePath = "/opt/gh-aw/prompts/issue_group_parent.md"; + const issueTemplate = fs.readFileSync(issueTemplatePath, "utf8"); + + // Create template context + const templateContext = { + group_id: groupId, + workflow_name: workflowName, + workflow_source_url: workflowSourceURL || "#", + }; + + // Render the issue template + const body = renderTemplate(issueTemplate, templateContext); + + return { title, body }; +} + /** * Main handler factory for create_issue * Returns a message handler function that processes individual create_issue messages @@ -30,6 +192,7 @@ async function main(config = {}) { const maxCount = config.max || 10; const allowedRepos = parseAllowedRepos(config.allowed_repos); const defaultTargetRepo = getDefaultTargetRepo(config); + const groupEnabled = config.group === true || config.group === "true"; core.info(`Default target repo: ${defaultTargetRepo}`); if (allowedRepos.size > 0) { @@ -48,6 +211,9 @@ async function main(config = {}) { core.info(`Issues expire after: ${expiresHours} hours`); } core.info(`Max count: ${maxCount}`); + if (groupEnabled) { + core.info(`Issue grouping enabled: issues will be grouped as sub-issues`); + } // Track how many items we've processed for max limit let processedCount = 0; @@ -58,6 +224,9 @@ async function main(config = {}) 
{ // Map to track temporary_id -> {repo, number} relationships across messages const temporaryIdMap = new Map(); + // Cache for parent issue per group ID + const parentIssueCache = new Map(); + // Extract triggering context for footer generation const triggeringIssueNumber = context.payload?.issue?.number && !context.payload?.issue?.pull_request ? context.payload.issue.number : undefined; const triggeringPRNumber = context.payload?.pull_request?.number || (context.payload?.issue?.pull_request ? context.payload.issue.number : undefined); @@ -274,6 +443,42 @@ async function main(config = {}) { temporaryIdMap.set(normalizeTemporaryId(temporaryId), { repo: qualifiedItemRepo, number: issue.number }); core.info(`Stored temporary ID mapping: ${temporaryId} -> ${qualifiedItemRepo}#${issue.number}`); + // Handle grouping - find or create parent issue and link sub-issue + if (groupEnabled && !effectiveParentIssueNumber) { + // Use workflow name as the group ID + const groupId = workflowName; + core.info(`Grouping enabled - finding or creating parent issue for group: ${groupId}`); + + // Check cache first + let groupParentNumber = parentIssueCache.get(groupId); + + if (!groupParentNumber) { + // Not in cache, find or create parent + groupParentNumber = await findOrCreateParentIssue({ + groupId, + owner: repoParts.owner, + repo: repoParts.repo, + titlePrefix, + labels, + workflowName, + workflowSourceURL, + }); + + if (groupParentNumber) { + // Cache the parent issue number for this group + parentIssueCache.set(groupId, groupParentNumber); + } + } + + if (groupParentNumber) { + effectiveParentIssueNumber = groupParentNumber; + effectiveParentRepo = qualifiedItemRepo; + core.info(`Using parent issue #${effectiveParentIssueNumber} for group: ${groupId}`); + } else { + core.warning(`Failed to find or create parent issue for group: ${groupId}`); + } + } + // Sub-issue linking only works within the same repository if (effectiveParentIssueNumber && effectiveParentRepo === 
qualifiedItemRepo) { core.info(`Attempting to link issue #${issue.number} as sub-issue of #${effectiveParentIssueNumber}`); @@ -380,4 +585,4 @@ async function main(config = {}) { }; } -module.exports = { main }; +module.exports = { main, createParentIssueTemplate, searchForExistingParent, getSubIssueCount }; diff --git a/actions/setup/js/create_issue_group.test.cjs b/actions/setup/js/create_issue_group.test.cjs new file mode 100644 index 00000000000..38bca262dff --- /dev/null +++ b/actions/setup/js/create_issue_group.test.cjs @@ -0,0 +1,242 @@ +// @ts-check +/// + +import { describe, it, expect, beforeEach, afterEach, vi } from "vitest"; +import { searchForExistingParent, getSubIssueCount } from "./create_issue.cjs"; + +describe("searchForExistingParent", () => { + let mockGithub; + let mockCore; + + beforeEach(() => { + // Create mock objects + mockCore = { + info: vi.fn(), + warning: vi.fn(), + }; + + mockGithub = { + rest: { + search: { + issuesAndPullRequests: vi.fn().mockResolvedValue({ + data: { + total_count: 0, + items: [], + }, + }), + }, + }, + graphql: vi.fn().mockResolvedValue({ + repository: { + issue: { + subIssues: { + totalCount: 0, + }, + }, + }, + }), + }; + + // Set global mocks + global.github = mockGithub; + global.core = mockCore; + }); + + afterEach(() => { + vi.clearAllMocks(); + }); + + it("should return null when no parent issues found", async () => { + const result = await searchForExistingParent("owner", "repo", ""); + + expect(result).toBeNull(); + }); + + it("should return issue number when open parent with available slots found", async () => { + mockGithub.rest.search.issuesAndPullRequests.mockResolvedValue({ + data: { + total_count: 1, + items: [ + { + number: 42, + title: "Parent Issue", + state: "open", + }, + ], + }, + }); + + mockGithub.graphql.mockResolvedValue({ + repository: { + issue: { + subIssues: { + totalCount: 30, + }, + }, + }, + }); + + const result = await searchForExistingParent("owner", "repo", ""); + + 
expect(result).toBe(42); + }); + + it("should skip closed parent issues", async () => { + mockGithub.rest.search.issuesAndPullRequests.mockResolvedValue({ + data: { + total_count: 1, + items: [ + { + number: 42, + title: "Closed Parent", + state: "closed", + }, + ], + }, + }); + + const result = await searchForExistingParent("owner", "repo", ""); + + expect(result).toBeNull(); + }); + + it("should skip full parent issues (64 sub-issues)", async () => { + mockGithub.rest.search.issuesAndPullRequests.mockResolvedValue({ + data: { + total_count: 1, + items: [ + { + number: 42, + title: "Full Parent", + state: "open", + }, + ], + }, + }); + + mockGithub.graphql.mockResolvedValue({ + repository: { + issue: { + subIssues: { + totalCount: 64, + }, + }, + }, + }); + + const result = await searchForExistingParent("owner", "repo", ""); + + expect(result).toBeNull(); + }); + + it("should find first available parent when multiple exist", async () => { + mockGithub.rest.search.issuesAndPullRequests.mockResolvedValue({ + data: { + total_count: 3, + items: [ + { number: 1, title: "Parent 1", state: "closed" }, + { number: 2, title: "Parent 2", state: "open" }, + { number: 3, title: "Parent 3", state: "open" }, + ], + }, + }); + + let callCount = 0; + mockGithub.graphql.mockImplementation(() => { + callCount++; + return Promise.resolve({ + repository: { + issue: { + subIssues: { + totalCount: 10, + }, + }, + }, + }); + }); + + const result = await searchForExistingParent("owner", "repo", ""); + + expect(result).toBe(2); // Should skip closed parent and return first open one + }); +}); + +describe("getSubIssueCount", () => { + let mockGithub; + let mockCore; + + beforeEach(() => { + mockCore = { + warning: vi.fn(), + }; + + mockGithub = { + graphql: vi.fn().mockResolvedValue({ + repository: { + issue: { + subIssues: { + totalCount: 0, + }, + }, + }, + }), + }; + + global.github = mockGithub; + global.core = mockCore; + }); + + afterEach(() => { + vi.clearAllMocks(); + }); + + 
it("should return sub-issue count from GraphQL", async () => { + mockGithub.graphql.mockResolvedValue({ + repository: { + issue: { + subIssues: { + totalCount: 25, + }, + }, + }, + }); + + const result = await getSubIssueCount("owner", "repo", 42); + + expect(result).toBe(25); + }); + + it("should return 0 when no sub-issues exist", async () => { + mockGithub.graphql.mockResolvedValue({ + repository: { + issue: { + subIssues: { + totalCount: 0, + }, + }, + }, + }); + + const result = await getSubIssueCount("owner", "repo", 42); + + expect(result).toBe(0); + }); + + it("should return null when GraphQL query fails", async () => { + mockGithub.graphql.mockRejectedValue(new Error("GraphQL error")); + + const result = await getSubIssueCount("owner", "repo", 42); + + expect(result).toBeNull(); + }); + + it("should handle missing data in GraphQL response", async () => { + mockGithub.graphql.mockResolvedValue({ + repository: null, + }); + + const result = await getSubIssueCount("owner", "repo", 42); + + expect(result).toBe(0); + }); +}); diff --git a/actions/setup/md/issue_group_parent.md b/actions/setup/md/issue_group_parent.md new file mode 100644 index 00000000000..88120b19dc4 --- /dev/null +++ b/actions/setup/md/issue_group_parent.md @@ -0,0 +1,7 @@ +# {group_id} + +Parent issue for grouping related issues from [{workflow_name}]({workflow_source_url}). + +<!-- gh-aw-issue-group: {group_id} --> + +Sub-issues are automatically linked below (max 64 per parent). diff --git a/pkg/parser/schemas/main_workflow_schema.json b/pkg/parser/schemas/main_workflow_schema.json index 01efc55024c..af868514660 100644 --- a/pkg/parser/schemas/main_workflow_schema.json +++ b/pkg/parser/schemas/main_workflow_schema.json @@ -3685,6 +3685,11 @@ } ], "description": "Time until the issue expires and should be automatically closed. Supports integer (days) or relative time format. Minimum duration: 2 hours. When set, a maintenance workflow will be generated."
+ }, + "group": { + "type": "boolean", + "description": "If true, group issues as sub-issues under a parent issue. The workflow ID is used as the group identifier. Parent issues are automatically created and managed, with a maximum of 64 sub-issues per parent.", + "default": false } }, "additionalProperties": false, diff --git a/pkg/workflow/compiler_safe_outputs_config.go b/pkg/workflow/compiler_safe_outputs_config.go index 62d9cfd5e94..d71e4d52102 100644 --- a/pkg/workflow/compiler_safe_outputs_config.go +++ b/pkg/workflow/compiler_safe_outputs_config.go @@ -44,6 +44,10 @@ func (c *Compiler) addHandlerManagerConfigEnvVar(steps *[]string, data *Workflow if cfg.TargetRepoSlug != "" { handlerConfig["target-repo"] = cfg.TargetRepoSlug } + // Add group flag to config + if cfg.Group { + handlerConfig["group"] = true + } config["create_issue"] = handlerConfig } diff --git a/pkg/workflow/create_issue.go b/pkg/workflow/create_issue.go index 78a905cad5b..d83aceb8ff5 100644 --- a/pkg/workflow/create_issue.go +++ b/pkg/workflow/create_issue.go @@ -18,6 +18,7 @@ type CreateIssuesConfig struct { TargetRepoSlug string `yaml:"target-repo,omitempty"` // Target repository in format "owner/repo" for cross-repository issues AllowedRepos []string `yaml:"allowed-repos,omitempty"` // List of additional repositories that issues can be created in Expires int `yaml:"expires,omitempty"` // Hours until the issue expires and should be automatically closed + Group bool `yaml:"group,omitempty"` // If true, group issues as sub-issues under a parent issue (workflow ID is used as group identifier) } // parseIssuesConfig handles create-issue configuration @@ -148,6 +149,12 @@ func (c *Compiler) buildCreateOutputIssueJob(data *WorkflowData, mainJobName str customEnvVars = append(customEnvVars, fmt.Sprintf(" GH_AW_ISSUE_EXPIRES: \"%d\"\n", data.SafeOutputs.CreateIssues.Expires)) } + // Add group flag if set + if data.SafeOutputs.CreateIssues.Group { + customEnvVars = append(customEnvVars, " 
GH_AW_ISSUE_GROUP: \"true\"\n") + createIssueLog.Print("Issue grouping enabled - issues will be grouped as sub-issues under parent") + } + // Add standard environment variables (metadata + staged/target repo) customEnvVars = append(customEnvVars, c.buildStandardSafeOutputEnvVars(data, data.SafeOutputs.CreateIssues.TargetRepoSlug)...) diff --git a/pkg/workflow/create_issue_group_test.go b/pkg/workflow/create_issue_group_test.go new file mode 100644 index 00000000000..6764b585807 --- /dev/null +++ b/pkg/workflow/create_issue_group_test.go @@ -0,0 +1,248 @@ +package workflow + +import ( + "os" + "path/filepath" + "strings" + "testing" + + "github.com/githubnext/gh-aw/pkg/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// TestCreateIssueGroupFieldParsing verifies that the group field is parsed correctly +func TestCreateIssueGroupFieldParsing(t *testing.T) { + tests := []struct { + name string + frontmatter string + expectedGroup bool + }{ + { + name: "group enabled with true", + frontmatter: `--- +name: Test Workflow +on: workflow_dispatch +permissions: + contents: read +engine: copilot +safe-outputs: + create-issue: + max: 3 + group: true +--- + +Test content`, + expectedGroup: true, + }, + { + name: "group disabled with false", + frontmatter: `--- +name: Test Workflow +on: workflow_dispatch +permissions: + contents: read +engine: copilot +safe-outputs: + create-issue: + max: 3 + group: false +--- + +Test content`, + expectedGroup: false, + }, + { + name: "group not specified defaults to false", + frontmatter: `--- +name: Test Workflow +on: workflow_dispatch +permissions: + contents: read +engine: copilot +safe-outputs: + create-issue: + max: 3 +--- + +Test content`, + expectedGroup: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + tmpDir := testutil.TempDir(t, "group-test") + testFile := filepath.Join(tmpDir, "test-workflow.md") + require.NoError(t, os.WriteFile(testFile, 
[]byte(tt.frontmatter), 0644)) + + compiler := NewCompiler(false, "", "test") + require.NoError(t, compiler.CompileWorkflow(testFile)) + + // Parse the workflow to check the config + data, err := compiler.ParseWorkflowFile(testFile) + require.NoError(t, err) + + require.NotNil(t, data.SafeOutputs) + require.NotNil(t, data.SafeOutputs.CreateIssues) + assert.Equal(t, tt.expectedGroup, data.SafeOutputs.CreateIssues.Group, "Group field should match expected value") + }) + } +} + +// TestCreateIssueGroupInHandlerConfig verifies that the group flag is passed to the handler config JSON +func TestCreateIssueGroupInHandlerConfig(t *testing.T) { + tmpDir := testutil.TempDir(t, "handler-config-group-test") + + testContent := `--- +name: Test Handler Config Group +on: workflow_dispatch +permissions: + contents: read +engine: copilot +safe-outputs: + create-issue: + max: 2 + group: true + labels: [test-group] +--- + +Create test issues with grouping. +` + + testFile := filepath.Join(tmpDir, "test-group-handler.md") + require.NoError(t, os.WriteFile(testFile, []byte(testContent), 0644)) + + // Compile the workflow + compiler := NewCompiler(false, "", "test") + require.NoError(t, compiler.CompileWorkflow(testFile)) + + // Read the compiled output + outputFile := filepath.Join(tmpDir, "test-group-handler.lock.yml") + compiledContent, err := os.ReadFile(outputFile) + require.NoError(t, err) + + compiledStr := string(compiledContent) + + // Verify GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG contains the group flag + require.Contains(t, compiledStr, "GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG", "Expected handler config in compiled workflow") + + // Extract and verify the JSON contains group: true + require.Contains(t, compiledStr, `"group":true`, "Expected group flag in handler config JSON") +} + +// TestCreateIssueGroupWithoutPermissions verifies compilation with group field and no issues permission +func TestCreateIssueGroupWithoutPermissions(t *testing.T) { + tmpDir := testutil.TempDir(t, 
"group-no-permission-test") + + testContent := `--- +name: Test Group No Permission +on: workflow_dispatch +permissions: + contents: read +engine: copilot +safe-outputs: + create-issue: + max: 5 + group: true +--- + +Test grouping without explicit issues permission. +` + + testFile := filepath.Join(tmpDir, "test-group-no-perm.md") + require.NoError(t, os.WriteFile(testFile, []byte(testContent), 0644)) + + // Compile the workflow - should succeed (safe-outputs doesn't require explicit permission) + compiler := NewCompiler(false, "", "test") + require.NoError(t, compiler.CompileWorkflow(testFile)) + + // Read the compiled output + outputFile := filepath.Join(tmpDir, "test-group-no-perm.lock.yml") + compiledContent, err := os.ReadFile(outputFile) + require.NoError(t, err) + + compiledStr := string(compiledContent) + + // Verify the workflow compiled and contains the group flag + require.Contains(t, compiledStr, "GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG") + require.Contains(t, compiledStr, `"group":true`) +} + +// TestCreateIssueGroupWithTitlePrefix verifies group field works with title-prefix +func TestCreateIssueGroupWithTitlePrefix(t *testing.T) { + tmpDir := testutil.TempDir(t, "group-title-prefix-test") + + testContent := `--- +name: Test Group Title Prefix +on: workflow_dispatch +permissions: + contents: read +engine: copilot +safe-outputs: + create-issue: + max: 3 + group: true + title-prefix: "[Bot] " + labels: [automated, grouped] +--- + +Test grouping with title prefix. 
+` + + testFile := filepath.Join(tmpDir, "test-group-prefix.md") + require.NoError(t, os.WriteFile(testFile, []byte(testContent), 0644)) + + // Compile the workflow + compiler := NewCompiler(false, "", "test") + require.NoError(t, compiler.CompileWorkflow(testFile)) + + // Read the compiled output + outputFile := filepath.Join(tmpDir, "test-group-prefix.lock.yml") + compiledContent, err := os.ReadFile(outputFile) + require.NoError(t, err) + + compiledStr := string(compiledContent) + + // Verify both group and title_prefix are in the handler config + assert.True(t, strings.Contains(compiledStr, `"group":true`), "Expected group:true in compiled workflow") + assert.True(t, strings.Contains(compiledStr, `title_prefix`), "Expected title_prefix in compiled workflow") +} + +// TestCreateIssueGroupInMCPConfig verifies group flag is passed to MCP config +func TestCreateIssueGroupInMCPConfig(t *testing.T) { + tmpDir := testutil.TempDir(t, "group-mcp-config-test") + + testContent := `--- +name: Test Group MCP Config +on: workflow_dispatch +permissions: + contents: read +engine: copilot +safe-outputs: + create-issue: + max: 1 + group: true +--- + +Test MCP config with group. 
+` + + testFile := filepath.Join(tmpDir, "test-group-mcp.md") + require.NoError(t, os.WriteFile(testFile, []byte(testContent), 0644)) + + // Compile the workflow + compiler := NewCompiler(false, "", "test") + require.NoError(t, compiler.CompileWorkflow(testFile)) + + // Read the compiled output + outputFile := filepath.Join(tmpDir, "test-group-mcp.lock.yml") + compiledContent, err := os.ReadFile(outputFile) + require.NoError(t, err) + + compiledStr := string(compiledContent) + + // The group flag should be in handler config + require.Contains(t, compiledStr, "GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG", "Should have handler config") + require.Contains(t, compiledStr, `"group":true`, "Group flag should be in handler config") +} diff --git a/pkg/workflow/safe_outputs_config_generation.go b/pkg/workflow/safe_outputs_config_generation.go index 65ff828512c..c246883c15e 100644 --- a/pkg/workflow/safe_outputs_config_generation.go +++ b/pkg/workflow/safe_outputs_config_generation.go @@ -25,11 +25,16 @@ func generateSafeOutputsConfig(data *WorkflowData) string { // Handle safe-outputs configuration if present if data.SafeOutputs != nil { if data.SafeOutputs.CreateIssues != nil { - safeOutputsConfig["create_issue"] = generateMaxWithAllowedLabelsConfig( + config := generateMaxWithAllowedLabelsConfig( data.SafeOutputs.CreateIssues.Max, 1, // default max data.SafeOutputs.CreateIssues.AllowedLabels, ) + // Add group flag if enabled + if data.SafeOutputs.CreateIssues.Group { + config["group"] = true + } + safeOutputsConfig["create_issue"] = config } if data.SafeOutputs.CreateAgentSessions != nil { safeOutputsConfig["create_agent_task"] = generateMaxConfig(