diff --git a/src/artifact_providers/__tests__/github.test.ts b/src/artifact_providers/__tests__/github.test.ts
index 69b68e75..33ca491a 100644
--- a/src/artifact_providers/__tests__/github.test.ts
+++ b/src/artifact_providers/__tests__/github.test.ts
@@ -27,25 +27,34 @@
   }
   public testSearchForRevisionArtifact(
     revision: string,
-    getRevisionDate: lazyRequestCallback<string>
+    getRevisionDate: lazyRequestCallback<string>,
   ): Promise<ArtifactItem | null> {
     return this.searchForRevisionArtifact(revision, getRevisionDate);
   }
-  public testGetWorkflowRunsForCommit(revision: string): Promise<WorkflowRun[]> {
+  public testGetWorkflowRunsForCommit(
+    revision: string,
+  ): Promise<WorkflowRun[]> {
     return this.getWorkflowRunsForCommit(revision);
   }
   public testFilterWorkflowRuns(
     runs: WorkflowRun[],
-    filters: NormalizedArtifactFilter[]
+    filters: NormalizedArtifactFilter[],
   ): WorkflowRun[] {
     return this.filterWorkflowRuns(runs, filters);
   }
   public testGetArtifactsFromWorkflowRuns(
     runs: WorkflowRun[],
-    filters: NormalizedArtifactFilter[]
+    filters: NormalizedArtifactFilter[],
   ): Promise<ArtifactItem[]> {
     return this.getArtifactsFromWorkflowRuns(runs, filters);
   }
+  public testValidateAllPatternsMatched(
+    filters: NormalizedArtifactFilter[],
+    allRuns: WorkflowRun[],
+    matchingArtifacts: ArtifactItem[],
+  ): string[] {
+    return this.validateAllPatternsMatched(filters, allRuns, matchingArtifacts);
+  }
 }
 vi.mock('../../utils/async');
@@ -75,10 +84,9 @@ describe('GitHub Artifact Provider', () => {
       },
       git: { getCommit: vi.fn() },
     };
-    (
-      getGitHubClient as MockedFunction<typeof getGitHubClient>
+    (getGitHubClient as MockedFunction<typeof getGitHubClient>)
       // @ts-ignore we only need to mock a subset
-    ).mockReturnValueOnce(mockClient);
+      .mockReturnValueOnce(mockClient);
     githubArtifactProvider = new TestGitHubArtifactProvider({
       name: 'github-test',
@@ -131,7 +139,9 @@ describe('GitHub Artifact Provider', () => {
       expect(result).toHaveLength(1);
       expect(result[0].workflow).toBeUndefined();
       expect(result[0].artifacts).toHaveLength(1);
-      expect(result[0].artifacts[0].test('sentry-browser-7.0.0.tgz')).toBe(true);
+      expect(result[0].artifacts[0].test('sentry-browser-7.0.0.tgz')).toBe(
+        true,
+      );
     });
     test('normalizes array config to single filter with multiple patterns', () => {
@@ -142,7 +152,9 @@ describe('GitHub Artifact Provider', () => {
       expect(result).toHaveLength(1);
       expect(result[0].workflow).toBeUndefined();
       expect(result[0].artifacts).toHaveLength(2);
-      expect(result[0].artifacts[0].test('sentry-browser-7.0.0.tgz')).toBe(true);
+      expect(result[0].artifacts[0].test('sentry-browser-7.0.0.tgz')).toBe(
+        true,
+      );
       expect(result[0].artifacts[1].test('release-bundle')).toBe(true);
       expect(result[0].artifacts[1].test('release-bundle-extra')).toBe(false);
     });
@@ -155,13 +167,13 @@ describe('GitHub Artifact Provider', () => {
       expect(result).toHaveLength(2);
       // First filter: build -> release-artifacts
-      expect(result[0].workflow?.test('build')).toBe(true);
-      expect(result[0].workflow?.test('build-linux')).toBe(false);
+      expect(result[0].workflow!.test('build')).toBe(true);
+      expect(result[0].workflow!.test('build-linux')).toBe(false);
       expect(result[0].artifacts).toHaveLength(1);
       expect(result[0].artifacts[0].test('release-artifacts')).toBe(true);
       // Second filter: ci -> [output, bundle]
-      expect(result[1].workflow?.test('ci')).toBe(true);
+      expect(result[1].workflow!.test('ci')).toBe(true);
       expect(result[1].artifacts).toHaveLength(2);
       expect(result[1].artifacts[0].test('output')).toBe(true);
       expect(result[1].artifacts[1].test('bundle')).toBe(true);
@@ -175,14 +187,14 @@
describe('GitHub Artifact Provider', () => { expect(result).toHaveLength(2); // First filter: /^build-.*$/ -> /^output-.*$/ - expect(result[0].workflow?.test('build-linux')).toBe(true); - expect(result[0].workflow?.test('build-macos')).toBe(true); - expect(result[0].workflow?.test('test-linux')).toBe(false); + expect(result[0].workflow!.test('build-linux')).toBe(true); + expect(result[0].workflow!.test('build-macos')).toBe(true); + expect(result[0].workflow!.test('test-linux')).toBe(false); expect(result[0].artifacts[0].test('output-x86')).toBe(true); expect(result[0].artifacts[0].test('output-arm')).toBe(true); // Second filter: /^release-.*$/ -> [/^dist-.*$/, checksums] - expect(result[1].workflow?.test('release-production')).toBe(true); + expect(result[1].workflow!.test('release-production')).toBe(true); expect(result[1].artifacts).toHaveLength(2); expect(result[1].artifacts[0].test('dist-linux')).toBe(true); expect(result[1].artifacts[1].test('checksums')).toBe(true); @@ -227,8 +239,8 @@ describe('GitHub Artifact Provider', () => { }); await expect( githubArtifactProvider.testGetRevisionArtifact( - '1b843f2cbb20fdda99ef749e29e75e43e6e43b38' - ) + '1b843f2cbb20fdda99ef749e29e75e43e6e43b38', + ), ).resolves.toMatchInlineSnapshot(` { "archive_download_url": "https://api.github.com/repos/getsentry/craft/actions/artifacts/60233710/zip", @@ -300,8 +312,8 @@ describe('GitHub Artifact Provider', () => { }); await expect( githubArtifactProvider.testGetRevisionArtifact( - '1b843f2cbb20fdda99ef749e29e75e43e6e43b38' - ) + '1b843f2cbb20fdda99ef749e29e75e43e6e43b38', + ), ).resolves.toMatchInlineSnapshot(` { "archive_download_url": "https://api.github.com/repos/getsentry/craft/actions/artifacts/60233710/zip", @@ -356,8 +368,8 @@ describe('GitHub Artifact Provider', () => { }); await expect( githubArtifactProvider.testGetRevisionArtifact( - '1b843f2cbb20fdda99ef749e29e75e43e6e43b38' - ) + '1b843f2cbb20fdda99ef749e29e75e43e6e43b38', + ), ).resolves.toMatchInlineSnapshot(` { "archive_download_url": "https://api.github.com/repos/getsentry/craft/actions/artifacts/60233710/zip", @@ -385,10 +397,10 @@ describe('GitHub Artifact Provider', () => { await expect( githubArtifactProvider.testGetRevisionArtifact( - '1b843f2cbb20fdda99ef749e29e75e43e6e43b38' - ) + '1b843f2cbb20fdda99ef749e29e75e43e6e43b38', + ), ).rejects.toThrowErrorMatchingInlineSnapshot( - `[Error: Can't find any artifacts for revision "1b843f2cbb20fdda99ef749e29e75e43e6e43b38" (tries: 3)]` + `[Error: Can't find any artifacts for revision "1b843f2cbb20fdda99ef749e29e75e43e6e43b38" (tries: 3)]`, ); expect(mockClient.actions.listArtifactsForRepo).toBeCalledTimes(3); @@ -432,10 +444,10 @@ describe('GitHub Artifact Provider', () => { }); await expect( githubArtifactProvider.testGetRevisionArtifact( - '3c2e87573d3bd16f61cf08fece0638cc47a4fc22' - ) + '3c2e87573d3bd16f61cf08fece0638cc47a4fc22', + ), ).rejects.toThrowErrorMatchingInlineSnapshot( - `[Error: Can't find any artifacts for revision "3c2e87573d3bd16f61cf08fece0638cc47a4fc22" (tries: 3)]` + `[Error: Can't find any artifacts for revision "3c2e87573d3bd16f61cf08fece0638cc47a4fc22" (tries: 3)]`, ); expect(sleep).toBeCalledTimes(2); }); @@ -497,8 +509,8 @@ describe('GitHub Artifact Provider', () => { '1b843f2cbb20fdda99ef749e29e75e43e6e43b38', lazyRequest(() => { return getRevisionDateCallback(); - }) - ) + }), + ), ).resolves.toMatchInlineSnapshot(` { "archive_download_url": "https://api.github.com/repos/getsentry/craft/actions/artifacts/60233710/zip", @@ -591,8 +603,8 @@ describe('GitHub 
Artifact Provider', () => { await expect( githubArtifactProvider.testSearchForRevisionArtifact( '1b843f2cbb20fdda99ef749e29e75e43e6e43b38', - lazyRequest(getRevisionDateCallback) - ) + lazyRequest(getRevisionDateCallback), + ), ).resolves.toMatchInlineSnapshot(`null`); expect(mockClient.actions.listArtifactsForRepo).toBeCalledTimes(3); expect(getRevisionDateCallback).toBeCalledTimes(1); @@ -631,8 +643,8 @@ describe('GitHub Artifact Provider', () => { '1b843f2cbb20fdda99ef749e29e75e43e6e43b38', lazyRequest(() => { return getRevisionDateCallback(); - }) - ) + }), + ), ).resolves.toMatchInlineSnapshot(`null`); expect(mockClient.actions.listArtifactsForRepo).toBeCalledTimes(1); expect(getRevisionDateCallback).toBeCalledTimes(1); @@ -652,9 +664,8 @@ describe('GitHub Artifact Provider', () => { }, }); - const runs = await githubArtifactProvider.testGetWorkflowRunsForCommit( - 'abc123' - ); + const runs = + await githubArtifactProvider.testGetWorkflowRunsForCommit('abc123'); expect(runs).toHaveLength(2); expect(runs[0].name).toBe('Build & Test'); @@ -697,9 +708,8 @@ describe('GitHub Artifact Provider', () => { }, }); - const runs = await githubArtifactProvider.testGetWorkflowRunsForCommit( - 'abc123' - ); + const runs = + await githubArtifactProvider.testGetWorkflowRunsForCommit('abc123'); expect(runs).toHaveLength(105); expect(mockClient.actions.listWorkflowRunsForRepo).toBeCalledTimes(2); @@ -721,7 +731,7 @@ describe('GitHub Artifact Provider', () => { const result = githubArtifactProvider.testFilterWorkflowRuns( mockRuns, - filters + filters, ); expect(result).toHaveLength(4); }); @@ -733,7 +743,7 @@ describe('GitHub Artifact Provider', () => { const result = githubArtifactProvider.testFilterWorkflowRuns( mockRuns, - filters + filters, ); expect(result).toHaveLength(1); expect(result[0].name).toBe('Build & Test'); @@ -746,7 +756,7 @@ describe('GitHub Artifact Provider', () => { const result = githubArtifactProvider.testFilterWorkflowRuns( mockRuns, - filters + filters, ); expect(result).toHaveLength(2); expect(result.map(r => r.name)).toEqual(['build-linux', 'build-macos']); @@ -760,7 +770,7 @@ describe('GitHub Artifact Provider', () => { const result = githubArtifactProvider.testFilterWorkflowRuns( mockRuns, - filters + filters, ); expect(result).toHaveLength(2); expect(result.map(r => r.name)).toEqual(['Build & Test', 'Lint']); @@ -800,11 +810,14 @@ describe('GitHub Artifact Provider', () => { const artifacts = await githubArtifactProvider.testGetArtifactsFromWorkflowRuns( mockRuns, - filters + filters, ); expect(artifacts).toHaveLength(2); - expect(artifacts.map(a => a.name)).toEqual(['craft-binary', 'craft-docs']); + expect(artifacts.map(a => a.name)).toEqual([ + 'craft-binary', + 'craft-docs', + ]); }); test('matches artifacts without workflow filter (all workflows)', async () => { @@ -836,7 +849,7 @@ describe('GitHub Artifact Provider', () => { const artifacts = await githubArtifactProvider.testGetArtifactsFromWorkflowRuns( mockRuns, - filters + filters, ); expect(artifacts).toHaveLength(2); @@ -866,10 +879,249 @@ describe('GitHub Artifact Provider', () => { const artifacts = await githubArtifactProvider.testGetArtifactsFromWorkflowRuns( mockRuns, - filters + filters, ); expect(artifacts).toHaveLength(1); }); }); + + describe('normalizeArtifactsConfig with glob patterns', () => { + test('glob pattern in artifact names matches correctly', () => { + const result = normalizeArtifactsConfig({ + Build: 'craft-*', + }); + expect(result).toHaveLength(1); + 
expect(result[0].artifacts).toHaveLength(1); + expect(result[0].artifacts[0].test('craft-binary')).toBe(true); + expect(result[0].artifacts[0].test('craft-docs')).toBe(true); + expect(result[0].artifacts[0].test('other-artifact')).toBe(false); + }); + + test('glob pattern in workflow names matches correctly', () => { + const result = normalizeArtifactsConfig({ + 'Build*': 'output', + }); + expect(result).toHaveLength(1); + expect(result[0].workflow!.test('Build')).toBe(true); + expect(result[0].workflow!.test('Build & Test')).toBe(true); + expect(result[0].workflow!.test('Lint')).toBe(false); + }); + + test('mixed glob and exact patterns', () => { + const result = normalizeArtifactsConfig({ + 'Build & Test': ['craft-*', 'exact-name'], + }); + expect(result).toHaveLength(1); + expect(result[0].artifacts).toHaveLength(2); + expect(result[0].artifacts[0].test('craft-binary')).toBe(true); + expect(result[0].artifacts[0].test('craft-docs')).toBe(true); + expect(result[0].artifacts[1].test('exact-name')).toBe(true); + expect(result[0].artifacts[1].test('exact-name-extra')).toBe(false); + }); + }); + + describe('validateAllPatternsMatched', () => { + const mockRuns = [ + { id: 1, name: 'Build & Test' }, + { id: 2, name: 'Lint' }, + ] as WorkflowRun[]; + + test('returns no errors when all patterns have matches', () => { + const filters: NormalizedArtifactFilter[] = [ + { + workflow: /^Build & Test$/, + artifacts: [/^craft-binary$/, /^craft-docs$/], + }, + ]; + const artifacts = [ + { id: 101, name: 'craft-binary', workflow_run: { id: 1 } }, + { id: 102, name: 'craft-docs', workflow_run: { id: 1 } }, + ] as ArtifactItem[]; + + const errors = githubArtifactProvider.testValidateAllPatternsMatched( + filters, + mockRuns, + artifacts, + ); + expect(errors).toEqual([]); + }); + + test('returns error when an artifact pattern has no match', () => { + const filters: NormalizedArtifactFilter[] = [ + { + workflow: /^Build & Test$/, + artifacts: [/^craft-binary$/, /^craft-docs$/], + }, + ]; + // Only craft-binary exists, craft-docs is missing + const artifacts = [ + { id: 101, name: 'craft-binary', workflow_run: { id: 1 } }, + ] as ArtifactItem[]; + + const errors = githubArtifactProvider.testValidateAllPatternsMatched( + filters, + mockRuns, + artifacts, + ); + expect(errors).toHaveLength(1); + expect(errors[0]).toMatch(/Artifact pattern.*craft-docs.*did not match/); + }); + + test('returns error when a workflow pattern has no match', () => { + const filters: NormalizedArtifactFilter[] = [ + { + workflow: /^Release$/, + artifacts: [/^output$/], + }, + ]; + const artifacts = [ + { id: 101, name: 'output', workflow_run: { id: 1 } }, + ] as ArtifactItem[]; + + const errors = githubArtifactProvider.testValidateAllPatternsMatched( + filters, + mockRuns, + artifacts, + ); + expect(errors).toHaveLength(1); + expect(errors[0]).toMatch( + /Workflow pattern.*Release.*did not match any workflow runs/, + ); + }); + + test('includes available names in error message for artifacts', () => { + const filters: NormalizedArtifactFilter[] = [ + { + workflow: /^Build & Test$/, + artifacts: [/^craft-binary$/, /^missing-artifact$/], + }, + ]; + const artifacts = [ + { id: 101, name: 'craft-binary', workflow_run: { id: 1 } }, + ] as ArtifactItem[]; + + const errors = githubArtifactProvider.testValidateAllPatternsMatched( + filters, + mockRuns, + artifacts, + ); + expect(errors.join('\n')).toMatch( + /Available artifact names: craft-binary/, + ); + }); + + test('includes available names in error message for workflows', () => { + 
const filters: NormalizedArtifactFilter[] = [ + { + workflow: /^NonExistent$/, + artifacts: [/^output$/], + }, + ]; + const artifacts = [] as ArtifactItem[]; + + const errors = githubArtifactProvider.testValidateAllPatternsMatched( + filters, + mockRuns, + artifacts, + ); + expect(errors.join('\n')).toMatch( + /Available workflows: Build & Test, Lint/, + ); + }); + + test('reports multiple errors at once', () => { + const filters: NormalizedArtifactFilter[] = [ + { + workflow: /^Build & Test$/, + artifacts: [/^missing-one$/, /^missing-two$/], + }, + ]; + const artifacts = [ + { id: 101, name: 'craft-binary', workflow_run: { id: 1 } }, + ] as ArtifactItem[]; + + const errors = githubArtifactProvider.testValidateAllPatternsMatched( + filters, + mockRuns, + artifacts, + ); + expect(errors).toHaveLength(2); + expect(errors.join('\n')).toMatch(/missing-one/); + expect(errors.join('\n')).toMatch(/missing-two/); + }); + + test('skips artifact validation for unmatched workflow', () => { + // When a workflow pattern doesn't match, we report the workflow error + // but don't also report artifact errors for that filter + const filters: NormalizedArtifactFilter[] = [ + { + workflow: /^NonExistent$/, + artifacts: [/^should-not-report$/], + }, + ]; + const artifacts = [] as ArtifactItem[]; + + const errors = githubArtifactProvider.testValidateAllPatternsMatched( + filters, + mockRuns, + artifacts, + ); + expect(errors).toHaveLength(1); + expect(errors[0]).toMatch(/Workflow pattern/); + expect(errors[0]).not.toMatch(/should-not-report/); + }); + + test('returns no errors with no workflow filter (matches all runs)', () => { + const filters: NormalizedArtifactFilter[] = [ + { + workflow: undefined, + artifacts: [/^craft-binary$/], + }, + ]; + const artifacts = [ + { id: 101, name: 'craft-binary', workflow_run: { id: 1 } }, + ] as ArtifactItem[]; + + const errors = githubArtifactProvider.testValidateAllPatternsMatched( + filters, + mockRuns, + artifacts, + ); + expect(errors).toEqual([]); + }); + + test('scopes artifact validation to matching workflow runs', () => { + // Two workflows produce different artifacts. Workflow A's artifact + // should not satisfy workflow B's artifact pattern. 
+      const filters: NormalizedArtifactFilter[] = [
+        {
+          workflow: /^Build$/,
+          artifacts: [/^build-output$/],
+        },
+        {
+          workflow: /^Release$/,
+          artifacts: [/^release-output$/],
+        },
+      ];
+      const runs = [
+        { id: 10, name: 'Build' },
+        { id: 20, name: 'Release' },
+      ] as WorkflowRun[];
+      // build-output comes from Build, release-output is missing from Release
+      const artifacts = [
+        { id: 101, name: 'build-output', workflow_run: { id: 10 } },
+      ] as ArtifactItem[];
+
+      const errors = githubArtifactProvider.testValidateAllPatternsMatched(
+        filters,
+        runs,
+        artifacts,
+      );
+      expect(errors).toHaveLength(1);
+      expect(errors[0]).toMatch(/release-output/);
+      expect(errors[0]).toMatch(/from workflow/);
+      expect(errors[0]).toMatch(/Release/);
+    });
+  });
 });
diff --git a/src/artifact_providers/github.ts b/src/artifact_providers/github.ts
index 51026f57..a89bc297 100644
--- a/src/artifact_providers/github.ts
+++ b/src/artifact_providers/github.ts
@@ -44,7 +44,7 @@ export interface NormalizedArtifactFilter {
  * Normalizes artifact patterns from string/array format to array of RegExp
  */
 function normalizeArtifactPatterns(
-  patterns: string | string[] | undefined
+  patterns: string | string[] | undefined,
 ): RegExp[] {
   if (!patterns) {
     return [];
   }
@@ -60,18 +60,22 @@
  * @returns Array of normalized filter objects
  */
 export function normalizeArtifactsConfig(
-  config: GitHubArtifactsConfig
+  config: GitHubArtifactsConfig,
 ): NormalizedArtifactFilter[] {
   if (!config) {
     return [];
   }
   if (typeof config === 'string') {
-    return [{ workflow: undefined, artifacts: normalizeArtifactPatterns(config) }];
+    return [
+      { workflow: undefined, artifacts: normalizeArtifactPatterns(config) },
+    ];
   }
   if (Array.isArray(config)) {
-    return [{ workflow: undefined, artifacts: normalizeArtifactPatterns(config) }];
+    return [
+      { workflow: undefined, artifacts: normalizeArtifactPatterns(config) },
+    ];
   }
   const filters: NormalizedArtifactFilter[] = [];
@@ -101,11 +105,11 @@ export class GitHubArtifactProvider extends BaseArtifactProvider {
    */
   public async doDownloadArtifact(
     artifact: RemoteArtifact,
-    downloadDirectory: string
+    downloadDirectory: string,
   ): Promise<string> {
     const destination = path.join(downloadDirectory, artifact.filename);
     this.logger.debug(
-      `rename ${artifact.storedFile.downloadFilepath} to ${destination}`
+      `rename ${artifact.storedFile.downloadFilepath} to ${destination}`,
     );
     fs.renameSync(artifact.storedFile.downloadFilepath, destination);
     return destination;
   }
@@ -120,13 +124,13 @@
    */
   protected async searchForRevisionArtifact(
     revision: string,
-    getRevisionDate: lazyRequestCallback<string>
+    getRevisionDate: lazyRequestCallback<string>,
   ): Promise<ArtifactItem | null> {
     const { repoName: repo, repoOwner: owner } = this.config;
     const per_page = 100;
     this.logger.debug(
-      `Searching GitHub artifacts for ${owner}/${repo}, revision ${revision}`
+      `Searching GitHub artifacts for ${owner}/${repo}, revision ${revision}`,
     );
     let checkNextPage = true;
@@ -148,7 +152,7 @@
       // There is no public documentation on this but the observed data and
       // common-sense logic suggests that this is a reasonably safe assumption.
       const foundArtifact = artifacts.find(
-        artifact => artifact.name === revision
+        artifact => artifact.name === revision,
       );
       if (foundArtifact) {
         this.logger.trace(`Found artifact on page ${page}:`, foundArtifact);
@@ -199,12 +203,12 @@
       this.logger.info(
         `Fetching GitHub artifacts for ${owner}/${repo}, revision ${revision} (attempt ${
           tries + 1
-        } of ${MAX_TRIES})`
+        } of ${MAX_TRIES})`,
       );
       artifact = await this.searchForRevisionArtifact(
         revision,
-        getRevisionDate
+        getRevisionDate,
       );
       if (artifact) {
         return artifact;
       }
@@ -217,21 +221,23 @@
         this.logger.info(
           `Waiting ${
             ARTIFACTS_POLLING_INTERVAL / MILLISECONDS
-          } seconds for artifacts to become available via GitHub API...`
+          } seconds for artifacts to become available via GitHub API...`,
         );
         await sleep(ARTIFACTS_POLLING_INTERVAL);
       }
     }
     throw new Error(
-      `Can't find any artifacts for revision "${revision}" (tries: ${MAX_TRIES})`
+      `Can't find any artifacts for revision "${revision}" (tries: ${MAX_TRIES})`,
     );
   }
   /**
    * Downloads and unpacks a single GitHub artifact
    */
-  private async downloadAndUnpackArtifacts(url: string): Promise<RemoteArtifact[]> {
+  private async downloadAndUnpackArtifacts(
+    url: string,
+  ): Promise<RemoteArtifact[]> {
     const tempFile = await this.downloadToTempFile(url);
     this.logger.info(`Finished downloading.`);
     return this.unpackArtifact(tempFile);
   }
@@ -242,7 +248,7 @@
    * @param foundArtifact
    */
   private async getArchiveDownloadUrl(
-    foundArtifact: ArtifactItem
+    foundArtifact: ArtifactItem,
   ): Promise<string> {
     const { repoName, repoOwner } = this.config;
@@ -263,12 +269,12 @@
    * @returns Array of workflow runs for the commit
    */
   protected async getWorkflowRunsForCommit(
-    revision: string
+    revision: string,
   ): Promise<WorkflowRun[]> {
     const { repoName: repo, repoOwner: owner } = this.config;
     this.logger.debug(
-      `Fetching workflow runs for commit ${revision} from ${owner}/${repo}`
+      `Fetching workflow runs for commit ${revision} from ${owner}/${repo}`,
     );
     const runs: WorkflowRun[] = [];
@@ -303,7 +309,7 @@
    */
   protected filterWorkflowRuns(
     runs: WorkflowRun[],
-    filters: NormalizedArtifactFilter[]
+    filters: NormalizedArtifactFilter[],
   ): WorkflowRun[] {
     // If no filters have workflow patterns, return all runs
     const hasWorkflowFilters = filters.some(f => f.workflow !== undefined);
@@ -314,7 +320,7 @@
     return runs.filter(run => {
       const workflowName = run.name ?? '';
       return filters.some(
-        filter => !filter.workflow || filter.workflow.test(workflowName)
+        filter => !filter.workflow || filter.workflow.test(workflowName),
       );
     });
   }
@@ -324,7 +330,7 @@
    */
   private getApplicablePatterns(
     workflowName: string,
-    filters: NormalizedArtifactFilter[]
+    filters: NormalizedArtifactFilter[],
   ): RegExp[] {
     const patterns: RegExp[] = [];
     for (const filter of filters) {
@@ -340,7 +346,7 @@
    */
   protected async getArtifactsFromWorkflowRuns(
     runs: WorkflowRun[],
-    filters: NormalizedArtifactFilter[]
+    filters: NormalizedArtifactFilter[],
   ): Promise<ArtifactItem[]> {
     const { repoName: repo, repoOwner: owner } = this.config;
     const matchingArtifacts: ArtifactItem[] = [];
@@ -354,7 +360,7 @@
       }
       this.logger.debug(
-        `Fetching artifacts for workflow run "${workflowName}" (ID: ${run.id})`
+        `Fetching artifacts for workflow run "${workflowName}" (ID: ${run.id})`,
       );
       const per_page = 100;
@@ -374,7 +380,7 @@
         const matches = patterns.some(pattern => pattern.test(artifact.name));
         if (matches) {
           this.logger.debug(
-            `Artifact "${artifact.name}" matches filter from workflow "${workflowName}"`
+            `Artifact "${artifact.name}" matches filter from workflow "${workflowName}"`,
           );
           seenArtifactIds.add(artifact.id);
           matchingArtifacts.push(artifact);
@@ -400,25 +406,27 @@
         const response = await fetch(url);
         if (!response.ok) {
           throw new Error(
-            `Unexpected HTTP response from ${url}: ${response.status} (${response.statusText})`
+            `Unexpected HTTP response from ${url}: ${response.status} (${response.statusText})`,
           );
         }
         await new Promise((resolve, reject) =>
           response.body
             .pipe(fs.createWriteStream(tempFilepath))
             .on('finish', () => resolve())
-            .on('error', reject)
+            .on('error', reject),
         );
         return tempFilepath;
       },
-      false // Don't cleanup - we'll handle it after unpacking
+      false, // Don't cleanup - we'll handle it after unpacking
     );
   }
   /**
    * Unpacks a downloaded zip file and returns the artifacts
    */
-  private async unpackArtifact(tempFilepath: string): Promise<RemoteArtifact[]> {
+  private async unpackArtifact(
+    tempFilepath: string,
+  ): Promise<RemoteArtifact[]> {
     const artifacts: RemoteArtifact[] = [];
     await withTempDir(async tmpDir => {
       this.logger.debug(`Extracting "${tempFilepath}" to "${tmpDir}"...`);
@@ -447,7 +455,7 @@
    * 2. Unpack all artifacts in parallel
    */
   protected async downloadArtifactsInParallel(
-    artifactItems: ArtifactItem[]
+    artifactItems: ArtifactItem[],
   ): Promise<RemoteArtifact[]> {
     const limit = pLimit(DOWNLOAD_CONCURRENCY);
@@ -458,7 +466,7 @@
         this.logger.debug(`Downloading artifact "${item.name}"...`);
         const url = await this.getArchiveDownloadUrl(item);
         return this.downloadToTempFile(url);
-      })
+      }),
     );
     const tempFiles = await Promise.all(downloadPromises);
     this.logger.info(`Downloaded ${tempFiles.length} artifacts.`);
@@ -467,18 +475,18 @@
     // Use allSettled to ensure we can clean up all temp files even if some unpacking fails
     this.logger.debug(`Unpacking ${tempFiles.length} artifacts...`);
     const unpackPromises = tempFiles.map(tempFile =>
-      limit(() => this.unpackArtifact(tempFile))
+      limit(() => this.unpackArtifact(tempFile)),
     );
     const results = await Promise.allSettled(unpackPromises);
     // Collect successful unpacks and clean up failed ones
     const allArtifacts: RemoteArtifact[] = [];
     const errors: Error[] = [];
-
+
     for (let i = 0; i < results.length; i++) {
       const result = results[i];
       const tempFile = tempFiles[i];
-
+
       if (result.status === 'fulfilled') {
         allArtifacts.push(...result.value);
       } else {
@@ -487,10 +495,15 @@
         try {
           if (fs.existsSync(tempFile)) {
             fs.unlinkSync(tempFile);
-            this.logger.debug(`Cleaned up temp file after failed unpack: ${tempFile}`);
+            this.logger.debug(
+              `Cleaned up temp file after failed unpack: ${tempFile}`,
+            );
           }
         } catch (cleanupError) {
-          this.logger.warn(`Failed to clean up temp file ${tempFile}:`, cleanupError);
+          this.logger.warn(
+            `Failed to clean up temp file ${tempFile}:`,
+            cleanupError,
+          );
         }
       }
     }
@@ -499,13 +512,78 @@
     if (errors.length > 0) {
       const errorMessages = errors.map(e => e.message).join('; ');
       throw new Error(
-        `Failed to unpack ${errors.length} of ${tempFiles.length} artifacts: ${errorMessages}`
+        `Failed to unpack ${errors.length} of ${tempFiles.length} artifacts: ${errorMessages}`,
       );
     }
     return allArtifacts;
   }
+  /**
+   * Validates that every configured workflow pattern matched at least one
+   * workflow run, and every artifact pattern matched at least one artifact.
+   *
+   * This catches configuration issues early with clear error messages instead
+   * of letting them surface as confusing per-target failures later.
+   *
+   * @param filters Normalized artifact filters from config
+   * @param allRuns All workflow runs found for the commit
+   * @param matchingArtifacts Artifacts that matched at least one pattern
+   */
+  protected validateAllPatternsMatched(
+    filters: NormalizedArtifactFilter[],
+    allRuns: WorkflowRun[],
+    matchingArtifacts: ArtifactItem[],
+  ): string[] {
+    const errors: string[] = [];
+
+    for (const filter of filters) {
+      // Validate workflow pattern matched at least one run
+      if (filter.workflow) {
+        const hasMatchingRun = allRuns.some(run =>
+          filter.workflow!.test(run.name ?? ''),
+        );
+        if (!hasMatchingRun) {
+          const availableNames = allRuns
+            .map(r => r.name ?? '(unnamed)')
+            .join(', ');
+          errors.push(
+            `Workflow pattern ${filter.workflow} did not match any workflow runs. ` +
+              `Available workflows: ${availableNames || '(none)'}`,
+          );
+          // Skip artifact validation for this filter since no runs matched
+          continue;
+        }
+      }
+
+      // Scope artifact validation to runs matching this filter's workflow
+      const scopedArtifacts = filter.workflow
+        ? matchingArtifacts.filter(a => {
+            const runName =
+              allRuns.find(r => r.id === a.workflow_run?.id)?.name ?? '';
+            return filter.workflow!.test(runName);
+          })
+        : matchingArtifacts;
+
+      // Validate each artifact pattern matched at least one artifact
+      for (const artifactPattern of filter.artifacts) {
+        const matched = scopedArtifacts.some(a => artifactPattern.test(a.name));
+        if (!matched) {
+          const availableNames = scopedArtifacts.map(a => a.name).join(', ');
+          const workflowDesc = filter.workflow
+            ? ` (from workflow ${filter.workflow})`
+            : '';
+          errors.push(
+            `Artifact pattern ${artifactPattern}${workflowDesc} did not match any artifacts. ` +
+              `Available artifact names: ${availableNames || '(none)'}`,
+          );
+        }
+      }
+    }
+
+    return errors;
+  }
+
   /**
    * Fetches artifacts using the new workflow-based approach
    *
@@ -515,13 +593,13 @@
    */
   protected async doListArtifactsWithFilters(
     revision: string,
-    filters: NormalizedArtifactFilter[]
+    filters: NormalizedArtifactFilter[],
   ): Promise<RemoteArtifact[]> {
     for (let tries = 0; tries < MAX_TRIES; tries++) {
       this.logger.info(
         `Fetching GitHub artifacts for revision ${revision} using artifact filters (attempt ${
           tries + 1
-        } of ${MAX_TRIES})`
+        } of ${MAX_TRIES})`,
       );
       const allRuns = await this.getWorkflowRunsForCommit(revision);
@@ -531,24 +609,24 @@
         this.logger.info(
           `Waiting ${
             ARTIFACTS_POLLING_INTERVAL / MILLISECONDS
-          } seconds for workflow runs to become available...`
+          } seconds for workflow runs to become available...`,
         );
         await sleep(ARTIFACTS_POLLING_INTERVAL);
         continue;
       }
       throw new Error(
-        `No workflow runs found for revision "${revision}" (tries: ${MAX_TRIES})`
+        `No workflow runs found for revision "${revision}" (tries: ${MAX_TRIES})`,
       );
     }
     const filteredRuns = this.filterWorkflowRuns(allRuns, filters);
     this.logger.debug(
-      `${filteredRuns.length} of ${allRuns.length} workflow runs match filters`
+      `${filteredRuns.length} of ${allRuns.length} workflow runs match filters`,
     );
     const matchingArtifacts = await this.getArtifactsFromWorkflowRuns(
       filteredRuns,
-      filters
+      filters,
     );
     if (matchingArtifacts.length === 0) {
@@ -557,24 +635,51 @@
       this.logger.info(
         `Waiting ${
           ARTIFACTS_POLLING_INTERVAL / MILLISECONDS
-        } seconds for artifacts to become available...`
+        } seconds for artifacts to become available...`,
       );
       await sleep(ARTIFACTS_POLLING_INTERVAL);
       continue;
     }
     throw new Error(
-      `No artifacts matching filters found for revision "${revision}" (tries: ${MAX_TRIES})`
+      `No artifacts matching filters found for revision "${revision}" (tries: ${MAX_TRIES})`,
+    );
+  }
+
+  // Validate that ALL configured patterns matched, not just some.
+  // This catches cases like: config says ['craft-binary', 'craft-docs']
+  // but only 'craft-binary' was found. Without this check, the publish
+  // would silently proceed with missing artifacts.
+  const validationErrors = this.validateAllPatternsMatched(
+    filters,
+    allRuns,
+    matchingArtifacts,
+  );
+  if (validationErrors.length > 0) {
+    if (tries + 1 < MAX_TRIES) {
+      this.logger.info(
+        `Not all patterns matched yet (${validationErrors.length} unmatched). ` +
+          `Waiting ${
+            ARTIFACTS_POLLING_INTERVAL / MILLISECONDS
+          } seconds before retrying...`,
+      );
+      await sleep(ARTIFACTS_POLLING_INTERVAL);
+      continue;
+    }
+    throw new Error(
+      `Not all configured artifact patterns were satisfied:\n - ${validationErrors.join('\n - ')}\n` +
+        `Check that your workflow names and artifact names in .craft.yml match ` +
+        `what your CI actually produces.`,
     );
   }
     this.logger.debug(
-      `Downloading ${matchingArtifacts.length} artifacts in parallel...`
+      `Downloading ${matchingArtifacts.length} artifacts in parallel...`,
     );
     return await this.downloadArtifactsInParallel(matchingArtifacts);
   }
   throw new Error(
-    `Failed to fetch artifacts for revision "${revision}" (tries: ${MAX_TRIES})`
+    `Failed to fetch artifacts for revision "${revision}" (tries: ${MAX_TRIES})`,
   );
   }
@@ -582,7 +687,7 @@
    * @inheritDoc
    */
   protected async doListArtifactsForRevision(
-    revision: string
+    revision: string,
   ): Promise<RemoteArtifact[]> {
     const artifactsConfig = this.config.artifacts as
       | GitHubArtifactsConfig
       | undefined;
@@ -607,7 +712,7 @@
 }
 export function lazyRequest<T>(
-  cb: lazyRequestCallback<T>
+  cb: lazyRequestCallback<T>,
 ): lazyRequestCallback<T> {
   let data: T;
   return async () => {
diff --git a/src/commands/publish.ts b/src/commands/publish.ts
index 4fa84f8a..6e026a50 100644
--- a/src/commands/publish.ts
+++ b/src/commands/publish.ts
@@ -509,6 +509,7 @@ export async function publishMain(argv: PublishOptions): Promise {
   await checkRevisionStatus(statusProvider, revision, argv.noStatusCheck);
   await printRevisionSummary(artifactProvider, revision);
+  await checkRequiredArtifacts(artifactProvider, revision, config.requireNames);
   // Find targets
diff --git a/src/utils/__tests__/filters.test.ts b/src/utils/__tests__/filters.test.ts
index d29722a5..aa1f8f3d 100644
--- a/src/utils/__tests__/filters.test.ts
+++ b/src/utils/__tests__/filters.test.ts
@@ -1,4 +1,9 @@
-import { stringToRegexp, escapeRegex, patternToRegexp } from '../filters';
+import {
+  stringToRegexp,
+  escapeRegex,
+  patternToRegexp,
+  globToRegex,
+} from '../filters';
 describe('stringToRegexp', () => {
   test('converts string without special characters', () => {
@@ -47,6 +52,32 @@ describe('escapeRegex', () => {
   });
 });
+describe('globToRegex', () => {
+  test('converts * to .*', () => {
+    expect(globToRegex('sentry-*')).toBe('sentry-.*');
+  });
+
+  test('converts ? to .', () => {
+    expect(globToRegex('build-?.zip')).toBe('build-.\\.zip');
+  });
+
+  test('escapes special regex characters', () => {
+    expect(globToRegex('output.tar.gz')).toBe('output\\.tar\\.gz');
+  });
+
+  test('handles multiple globs', () => {
+    expect(globToRegex('*-build-*')).toBe('.*-build-.*');
+  });
+
+  test('handles mixed * and ?', () => {
+    expect(globToRegex('build-?-*.zip')).toBe('build-.-.*\\.zip');
+  });
+
+  test('leaves plain alphanumeric strings unchanged', () => {
+    expect(globToRegex('simple')).toBe('simple');
+  });
+});
+
 describe('patternToRegexp', () => {
   test('converts regex string to RegExp', () => {
     const result = patternToRegexp('/^build-.*$/');
@@ -74,4 +105,43 @@
     expect(result.test('output.tar.gz')).toBe(true);
     expect(result.test('outputXtarXgz')).toBe(false);
   });
+
+  test('converts glob pattern with * to matching RegExp', () => {
+    const result = patternToRegexp('sentry-*');
+    expect(result).toBeInstanceOf(RegExp);
+    expect(result.test('sentry-linux-x64')).toBe(true);
+    expect(result.test('sentry-darwin-arm64')).toBe(true);
+    expect(result.test('sentry-')).toBe(true);
+    expect(result.test('other-thing')).toBe(false);
+    expect(result.test('my-sentry-linux')).toBe(false);
+  });
+
+  test('converts glob pattern with ? to matching RegExp', () => {
+    const result = patternToRegexp('build-?.zip');
+    expect(result.test('build-a.zip')).toBe(true);
+    expect(result.test('build-1.zip')).toBe(true);
+    expect(result.test('build-ab.zip')).toBe(false);
+    expect(result.test('build-.zip')).toBe(false);
+  });
+
+  test('converts glob pattern with * preserving other special chars', () => {
+    const result = patternToRegexp('output.*.tar.gz');
+    expect(result.test('output.v2.tar.gz')).toBe(true);
+    expect(result.test('output.release.tar.gz')).toBe(true);
+    expect(result.test('outputXv2XtarXgz')).toBe(false);
+  });
+
+  test('converts glob pattern *.tgz to match any .tgz file', () => {
+    const result = patternToRegexp('*.tgz');
+    expect(result.test('package-1.0.0.tgz')).toBe(true);
+    expect(result.test('my-lib.tgz')).toBe(true);
+    expect(result.test('package-1.0.0.zip')).toBe(false);
+  });
+
+  test('glob * can match the empty string but surrounding literals are still required', () => {
+    const result = patternToRegexp('prefix-*-suffix');
+    expect(result.test('prefix--suffix')).toBe(true);
+    expect(result.test('prefix-middle-suffix')).toBe(true);
+    expect(result.test('prefix-suffix')).toBe(false);
+  });
 });
diff --git a/src/utils/filters.ts b/src/utils/filters.ts
index 8046d2d1..77b384d7 100644
--- a/src/utils/filters.ts
+++ b/src/utils/filters.ts
@@ -32,14 +32,54 @@ export function escapeRegex(str: string): string {
   return str.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
 }
+/**
+ * Returns true if the string contains glob wildcard characters (* or ?).
+ * Patterns wrapped in slashes are treated as regex strings by
+ * patternToRegexp before this check is applied.
+ */
+function hasGlobChars(str: string): boolean {
+  return /[*?]/.test(str);
+}
+
+/**
+ * Converts a glob pattern string to a regex string.
+ *
+ * Glob characters:
+ *   * → .* (match zero or more characters)
+ *   ? → .  (match exactly one character)
+ *
+ * All other regex-special characters are escaped.
+ * + * @param pattern Glob pattern string + * @returns Regex source string (without anchors) + */ +export function globToRegex(pattern: string): string { + let result = ''; + for (const char of pattern) { + if (char === '*') { + result += '.*'; + } else if (char === '?') { + result += '.'; + } else { + result += escapeRegex(char); + } + } + return result; +} + /** * Converts a pattern string to a RegExp. - * If wrapped in slashes (e.g., /^foo.*$/), parses as regex. - * Otherwise, creates an exact match pattern. + * + * Supports three formats: + * 1. Regex string wrapped in slashes (e.g., /^foo.*$/): parsed as RegExp + * 2. Glob pattern with * or ? (e.g., sentry-*): converted to regex + * 3. Plain string (e.g., craft-binary): exact match */ export function patternToRegexp(pattern: string): RegExp { if (pattern.startsWith('/') && pattern.lastIndexOf('/') > 0) { return stringToRegexp(pattern); } + if (hasGlobChars(pattern)) { + return new RegExp(`^${globToRegex(pattern)}$`); + } return new RegExp(`^${escapeRegex(pattern)}$`); }
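
Reviewer note: a small usage sketch (illustrative only, not part of the patch) showing how the three pattern styles introduced here behave once normalized. The relative import paths are assumptions; adjust them to wherever such a snippet would live relative to src/.

// Exact names, globs, and slash-wrapped regexes all flow through patternToRegexp.
import { normalizeArtifactsConfig } from './src/artifact_providers/github';
import { patternToRegexp } from './src/utils/filters';

console.log(patternToRegexp('craft-binary').test('craft-binary')); // true  (exact match)
console.log(patternToRegexp('craft-*').test('craft-docs'));        // true  (glob)
console.log(patternToRegexp('craft-*').test('my-craft-docs'));     // false (globs are anchored)
console.log(patternToRegexp('/^dist-.*$/').test('dist-linux'));    // true  (regex string)

// A workflow-name -> artifact-pattern(s) mapping normalizes to one filter per
// workflow entry; both the workflow key and the artifact values accept the
// same pattern styles.
const filters = normalizeArtifactsConfig({
  'Build*': ['craft-*', 'checksums'],
});
console.log(filters[0].workflow?.test('Build & Test'));    // true
console.log(filters[0].artifacts[0].test('craft-binary')); // true
console.log(filters[0].artifacts[1].test('checksums-extra')); // false (exact pattern)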