diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index e68bfdcdf8..c30a2c9486 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -2,6 +2,7 @@ name: "Code Scanning - Action" on: push: + pull_request: schedule: - cron: '0 0 * * 0' diff --git a/packages/artifact/RELEASES.md b/packages/artifact/RELEASES.md index 3fa8a5619b..0c41df2071 100644 --- a/packages/artifact/RELEASES.md +++ b/packages/artifact/RELEASES.md @@ -46,3 +46,11 @@ - Update to latest @actions/core version +### 0.4.2 + +- Improved retry-ability when a partial artifact download is encountered + +### 0.5.0 + +- Improved retry-ability for all http calls during artifact upload and download if an error is encountered + diff --git a/packages/artifact/__tests__/download.test.ts b/packages/artifact/__tests__/download.test.ts index b77e5a2bc3..05e5707108 100644 --- a/packages/artifact/__tests__/download.test.ts +++ b/packages/artifact/__tests__/download.test.ts @@ -71,7 +71,7 @@ describe('Download Tests', () => { setupFailedResponse() const downloadHttpClient = new DownloadHttpClient() expect(downloadHttpClient.listArtifacts()).rejects.toThrow( - 'Unable to list artifacts for the run' + 'List Artifacts failed: Artifact service responded with 500' ) }) @@ -113,7 +113,7 @@ describe('Download Tests', () => { configVariables.getRuntimeUrl() ) ).rejects.toThrow( - `Unable to get ContainersItems from ${configVariables.getRuntimeUrl()}` + `Get Container Items failed: Artifact service responded with 500` ) }) diff --git a/packages/artifact/__tests__/retry.test.ts b/packages/artifact/__tests__/retry.test.ts new file mode 100644 index 0000000000..102342a9fd --- /dev/null +++ b/packages/artifact/__tests__/retry.test.ts @@ -0,0 +1,114 @@ +import * as http from 'http' +import * as net from 'net' +import * as core from '@actions/core' +import * as configVariables from '../src/internal/config-variables' +import {retry} from '../src/internal/requestUtils' +import 
{IHttpClientResponse} from '@actions/http-client/interfaces' +import {HttpClientResponse} from '@actions/http-client' + +jest.mock('../src/internal/config-variables') + +interface ITestResult { + responseCode: number + errorMessage: string | null +} + +async function testRetry( + responseCodes: number[], + expectedResult: ITestResult +): Promise { + const reverse = responseCodes.reverse() // Reverse responses since we pop from end + if (expectedResult.errorMessage) { + // we expect some exception to be thrown + expect( + retry( + 'test', + async () => handleResponse(reverse.pop()), + new Map(), // extra error message for any particular http codes + configVariables.getRetryLimit() + ) + ).rejects.toThrow(expectedResult.errorMessage) + } else { + // we expect a correct status code to be returned + const actualResult = await retry( + 'test', + async () => handleResponse(reverse.pop()), + new Map(), // extra error message for any particular http codes + configVariables.getRetryLimit() + ) + expect(actualResult.message.statusCode).toEqual(expectedResult.responseCode) + } +} + +async function handleResponse( + testResponseCode: number | undefined +): Promise { + if (!testResponseCode) { + throw new Error( + 'Test incorrectly set up. 
reverse.pop() was called too many times so not enough test response codes were supplied' + ) + } + + return setupSingleMockResponse(testResponseCode) +} + +beforeAll(async () => { + // mock all output so that there is less noise when running tests + jest.spyOn(console, 'log').mockImplementation(() => {}) + jest.spyOn(core, 'debug').mockImplementation(() => {}) + jest.spyOn(core, 'info').mockImplementation(() => {}) + jest.spyOn(core, 'warning').mockImplementation(() => {}) + jest.spyOn(core, 'error').mockImplementation(() => {}) +}) + +/** + * Helpers used to setup mocking for the HttpClient + */ +async function emptyMockReadBody(): Promise { + return new Promise(resolve => { + resolve() + }) +} + +async function setupSingleMockResponse( + statusCode: number +): Promise { + const mockMessage = new http.IncomingMessage(new net.Socket()) + const mockReadBody = emptyMockReadBody + mockMessage.statusCode = statusCode + return new Promise(resolve => { + resolve({ + message: mockMessage, + readBody: mockReadBody + }) + }) +} + +test('retry works on successful response', async () => { + await testRetry([200], { + responseCode: 200, + errorMessage: null + }) +}) + +test('retry works after retryable status code', async () => { + await testRetry([503, 200], { + responseCode: 200, + errorMessage: null + }) +}) + +test('retry fails after exhausting retries', async () => { + // __mocks__/config-variables caps the max retry count in tests to 2 + await testRetry([503, 503, 200], { + responseCode: 200, + errorMessage: 'test failed: Artifact service responded with 503' + }) +}) + +test('retry fails after non-retryable status code', async () => { + await testRetry([500, 200], { + responseCode: 500, + errorMessage: 'test failed: Artifact service responded with 500' + }) +}) diff --git a/packages/artifact/__tests__/upload.test.ts b/packages/artifact/__tests__/upload.test.ts index f473b2e0f2..d21c78abdf 100644 --- a/packages/artifact/__tests__/upload.test.ts +++ 
b/packages/artifact/__tests__/upload.test.ts @@ -102,7 +102,7 @@ describe('Upload Tests', () => { uploadHttpClient.createArtifactInFileContainer(artifactName) ).rejects.toEqual( new Error( - `Unable to create a container for the artifact invalid-artifact-name at ${getArtifactUrl()}` + `Create Artifact Container failed: The artifact name invalid-artifact-name is not valid. Request URL ${getArtifactUrl()}` ) ) }) @@ -125,7 +125,7 @@ describe('Upload Tests', () => { uploadHttpClient.createArtifactInFileContainer(artifactName) ).rejects.toEqual( new Error( - 'Artifact storage quota has been hit. Unable to upload any new artifacts' + 'Create Artifact Container failed: Artifact storage quota has been hit. Unable to upload any new artifacts' ) ) }) @@ -362,7 +362,9 @@ describe('Upload Tests', () => { const uploadHttpClient = new UploadHttpClient() expect( uploadHttpClient.patchArtifactSize(-2, 'my-artifact') - ).rejects.toThrow('Unable to finish uploading artifact my-artifact') + ).rejects.toThrow( + 'Finalize artifact upload failed: Artifact service responded with 400' + ) }) /** diff --git a/packages/artifact/package-lock.json b/packages/artifact/package-lock.json index b0d828d4ae..f75bda69a4 100644 --- a/packages/artifact/package-lock.json +++ b/packages/artifact/package-lock.json @@ -1,6 +1,6 @@ { "name": "@actions/artifact", - "version": "0.4.1", + "version": "0.5.0", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/packages/artifact/package.json b/packages/artifact/package.json index b2df338aed..c76d4b1d3b 100644 --- a/packages/artifact/package.json +++ b/packages/artifact/package.json @@ -1,6 +1,6 @@ { "name": "@actions/artifact", - "version": "0.4.1", + "version": "0.5.0", "preview": true, "description": "Actions artifact lib", "keywords": [ diff --git a/packages/artifact/src/internal/download-http-client.ts b/packages/artifact/src/internal/download-http-client.ts index e090c54732..5f679d2d5d 100644 --- 
a/packages/artifact/src/internal/download-http-client.ts +++ b/packages/artifact/src/internal/download-http-client.ts @@ -11,7 +11,8 @@ import { tryGetRetryAfterValueTimeInMilliseconds, displayHttpDiagnostics, getFileSize, - rmFile + rmFile, + sleep } from './utils' import {URL} from 'url' import {StatusReporter} from './status-reporter' @@ -22,6 +23,7 @@ import {HttpManager} from './http-manager' import {DownloadItem} from './download-specification' import {getDownloadFileConcurrency, getRetryLimit} from './config-variables' import {IncomingHttpHeaders} from 'http' +import {retryHttpClientRequest} from './requestUtils' export class DownloadHttpClient { // http manager is used for concurrent connections when downloading multiple files at once @@ -46,16 +48,11 @@ export class DownloadHttpClient { // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately const client = this.downloadHttpManager.getClient(0) const headers = getDownloadHeaders('application/json') - const response = await client.get(artifactUrl, headers) - const body: string = await response.readBody() - - if (isSuccessStatusCode(response.message.statusCode) && body) { - return JSON.parse(body) - } - displayHttpDiagnostics(response) - throw new Error( - `Unable to list artifacts for the run. 
Resource Url ${artifactUrl}` + const response = await retryHttpClientRequest('List Artifacts', async () => + client.get(artifactUrl, headers) ) + const body: string = await response.readBody() + return JSON.parse(body) } /** @@ -74,14 +71,12 @@ export class DownloadHttpClient { // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately const client = this.downloadHttpManager.getClient(0) const headers = getDownloadHeaders('application/json') - const response = await client.get(resourceUrl.toString(), headers) + const response = await retryHttpClientRequest( + 'Get Container Items', + async () => client.get(resourceUrl.toString(), headers) + ) const body: string = await response.readBody() - - if (isSuccessStatusCode(response.message.statusCode) && body) { - return JSON.parse(body) - } - displayHttpDiagnostics(response) - throw new Error(`Unable to get ContainersItems from ${resourceUrl}`) + return JSON.parse(body) } /** @@ -188,14 +183,14 @@ export class DownloadHttpClient { core.info( `Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download` ) - await new Promise(resolve => setTimeout(resolve, retryAfterValue)) + await sleep(retryAfterValue) } else { // Back off using an exponential value that depends on the retry count const backoffTime = getExponentialRetryTimeInMilliseconds(retryCount) core.info( `Exponential backoff for retry #${retryCount}. 
Waiting for ${backoffTime} milliseconds before continuing the download` ) - await new Promise(resolve => setTimeout(resolve, backoffTime)) + await sleep(backoffTime) } core.info( `Finished backoff for retry #${retryCount}, continuing with download` diff --git a/packages/artifact/src/internal/requestUtils.ts b/packages/artifact/src/internal/requestUtils.ts new file mode 100644 index 0000000000..d11db03dbb --- /dev/null +++ b/packages/artifact/src/internal/requestUtils.ts @@ -0,0 +1,79 @@ +import {IHttpClientResponse} from '@actions/http-client/interfaces' +import { + isRetryableStatusCode, + isSuccessStatusCode, + sleep, + getExponentialRetryTimeInMilliseconds, + displayHttpDiagnostics +} from './utils' +import * as core from '@actions/core' +import {getRetryLimit} from './config-variables' + +export async function retry( + name: string, + operation: () => Promise, + customErrorMessages: Map, + maxAttempts: number +): Promise { + let response: IHttpClientResponse | undefined = undefined + let statusCode: number | undefined = undefined + let isRetryable = false + let errorMessage = '' + let customErrorInformation: string | undefined = undefined + let attempt = 1 + + while (attempt <= maxAttempts) { + try { + response = await operation() + statusCode = response.message.statusCode + + if (isSuccessStatusCode(statusCode)) { + return response + } + + // Extra error information that we want to display if a particular response code is hit + if (statusCode) { + customErrorInformation = customErrorMessages.get(statusCode) + } + + isRetryable = isRetryableStatusCode(statusCode) + errorMessage = `Artifact service responded with ${statusCode}` + } catch (error) { + isRetryable = true + errorMessage = error.message + } + + if (!isRetryable) { + core.info(`${name} - Error is not retryable`) + if (response) { + displayHttpDiagnostics(response) + } + break + } + + core.info( + `${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}` + ) + + await 
sleep(getExponentialRetryTimeInMilliseconds(attempt)) + attempt++ + } + + if (response) { + displayHttpDiagnostics(response) + } + + if (customErrorInformation) { + throw Error(`${name} failed: ${customErrorInformation}`) + } + throw Error(`${name} failed: ${errorMessage}`) +} + +export async function retryHttpClientRequest( + name: string, + method: () => Promise, + customErrorMessages: Map = new Map(), + maxAttempts = getRetryLimit() +): Promise { + return await retry(name, method, customErrorMessages, maxAttempts) +} diff --git a/packages/artifact/src/internal/upload-http-client.ts b/packages/artifact/src/internal/upload-http-client.ts index b77e5fd0a2..e632c8706c 100644 --- a/packages/artifact/src/internal/upload-http-client.ts +++ b/packages/artifact/src/internal/upload-http-client.ts @@ -15,11 +15,11 @@ import { isRetryableStatusCode, isSuccessStatusCode, isThrottledStatusCode, - isForbiddenStatusCode, displayHttpDiagnostics, getExponentialRetryTimeInMilliseconds, tryGetRetryAfterValueTimeInMilliseconds, - getProperRetention + getProperRetention, + sleep } from './utils' import { getUploadChunkSize, @@ -31,12 +31,13 @@ import {promisify} from 'util' import {URL} from 'url' import {performance} from 'perf_hooks' import {StatusReporter} from './status-reporter' -import {HttpClientResponse} from '@actions/http-client/index' +import {HttpCodes} from '@actions/http-client' import {IHttpClientResponse} from '@actions/http-client/interfaces' import {HttpManager} from './http-manager' import {UploadSpecification} from './upload-specification' import {UploadOptions} from './upload-options' import {createGZipFileOnDisk, createGZipFileInBuffer} from './upload-gzip' +import {retryHttpClientRequest} from './requestUtils' const stat = promisify(fs.stat) export class UploadHttpClient { @@ -80,23 +81,28 @@ export class UploadHttpClient { // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately const client = 
this.uploadHttpManager.getClient(0) const headers = getUploadHeaders('application/json', false) - const rawResponse = await client.post(artifactUrl, data, headers) - const body: string = await rawResponse.readBody() - - if (isSuccessStatusCode(rawResponse.message.statusCode) && body) { - return JSON.parse(body) - } else if (isForbiddenStatusCode(rawResponse.message.statusCode)) { - // if a 403 is returned when trying to create a file container, the customer has exceeded - // their storage quota so no new artifact containers can be created - throw new Error( - `Artifact storage quota has been hit. Unable to upload any new artifacts` - ) - } else { - displayHttpDiagnostics(rawResponse) - throw new Error( - `Unable to create a container for the artifact ${artifactName} at ${artifactUrl}` - ) - } + + // Extra information to display when a particular HTTP code is returned + // If a 403 is returned when trying to create a file container, the customer has exceeded + // their storage quota so no new artifact containers can be created + const customErrorMessages: Map = new Map([ + [ + HttpCodes.Forbidden, + 'Artifact storage quota has been hit. Unable to upload any new artifacts' + ], + [ + HttpCodes.BadRequest, + `The artifact name ${artifactName} is not valid. Request URL ${artifactUrl}` + ] + ]) + + const response = await retryHttpClientRequest( + 'Create Artifact Container', + async () => client.post(artifactUrl, data, headers), + customErrorMessages + ) + const body: string = await response.readBody() + return JSON.parse(body) } /** @@ -417,13 +423,13 @@ export class UploadHttpClient { core.info( `Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload` ) - await new Promise(resolve => setTimeout(resolve, retryAfterValue)) + await sleep(retryAfterValue) } else { const backoffTime = getExponentialRetryTimeInMilliseconds(retryCount) core.info( `Exponential backoff for retry #${retryCount}. 
Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}` ) - await new Promise(resolve => setTimeout(resolve, backoffTime)) + await sleep(backoffTime) } core.info( `Finished backoff for retry #${retryCount}, continuing with upload` @@ -486,7 +492,6 @@ * Updating the size indicates that we are done uploading all the contents of the artifact */ async patchArtifactSize(size: number, artifactName: string): Promise { - const headers = getUploadHeaders('application/json', false) const resourceUrl = new URL(getArtifactUrl()) resourceUrl.searchParams.append('artifactName', artifactName) @@ -496,25 +501,26 @@ // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately const client = this.uploadHttpManager.getClient(0) - const response: HttpClientResponse = await client.patch( - resourceUrl.toString(), - data, - headers + const headers = getUploadHeaders('application/json', false) + + // Extra information to display when a particular HTTP code is returned + const customErrorMessages: Map = new Map([ + [ + HttpCodes.NotFound, + `An Artifact with the name ${artifactName} was not found` + ] + ]) + + // TODO retry for all possible response codes; the artifact upload is pretty much complete, so at all costs we should try to finish it + const response = await retryHttpClientRequest( + 'Finalize artifact upload', + async () => client.patch(resourceUrl.toString(), data, headers), + customErrorMessages + ) + await response.readBody() + core.debug( + `Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}` ) - const body: string = await response.readBody() - if (isSuccessStatusCode(response.message.statusCode)) { - core.debug( - `Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}` - ) - } else if (response.message.statusCode === 404) { - throw new Error(`An Artifact with
the name ${artifactName} was not found`) - } else { - displayHttpDiagnostics(response) - core.info(body) - throw new Error( - `Unable to finish uploading artifact ${artifactName} to ${resourceUrl}` - ) - } } } diff --git a/packages/artifact/src/internal/utils.ts b/packages/artifact/src/internal/utils.ts index 2f4f2f470a..9e1b0123b6 100644 --- a/packages/artifact/src/internal/utils.ts +++ b/packages/artifact/src/internal/utils.ts @@ -65,7 +65,7 @@ export function isForbiddenStatusCode(statusCode?: number): boolean { return statusCode === HttpCodes.Forbidden } -export function isRetryableStatusCode(statusCode?: number): boolean { +export function isRetryableStatusCode(statusCode: number | undefined): boolean { if (!statusCode) { return false } @@ -335,3 +335,7 @@ export function getProperRetention( } return retention } + +export async function sleep(milliseconds: number): Promise { + return new Promise(resolve => setTimeout(resolve, milliseconds)) +} diff --git a/packages/cache/RELEASES.md b/packages/cache/RELEASES.md index 5b94146ab5..dc3901f94a 100644 --- a/packages/cache/RELEASES.md +++ b/packages/cache/RELEASES.md @@ -32,4 +32,8 @@ - Fixes uploadChunk to throw an error if any unsuccessful response code is received ### 1.0.5 -- Fix to ensure Windows cache paths get resolved correctly \ No newline at end of file +- Fix to ensure Windows cache paths get resolved correctly + +### 1.0.6 +- Make caching more verbose [#650](https://github.com/actions/toolkit/pull/650) +- Use GNU tar on macOS if available [#701](https://github.com/actions/toolkit/pull/701) \ No newline at end of file diff --git a/packages/cache/__tests__/tar.test.ts b/packages/cache/__tests__/tar.test.ts index b891d85b66..3f7a1b3b91 100644 --- a/packages/cache/__tests__/tar.test.ts +++ b/packages/cache/__tests__/tar.test.ts @@ -12,6 +12,8 @@ jest.mock('@actions/io') const IS_WINDOWS = process.platform === 'win32' +const defaultTarPath = process.platform === 'darwin' ? 
'gtar' : 'tar' + function getTempDir(): string { return path.join(__dirname, '_temp', 'tar') } @@ -38,14 +40,13 @@ test('zstd extract tar', async () => { ? `${process.env['windir']}\\fakepath\\cache.tar` : 'cache.tar' const workspace = process.env['GITHUB_WORKSPACE'] - const tarPath = 'tar' await tar.extractTar(archivePath, CompressionMethod.Zstd) expect(mkdirMock).toHaveBeenCalledWith(workspace) expect(execMock).toHaveBeenCalledTimes(1) expect(execMock).toHaveBeenCalledWith( - `"${tarPath}"`, + `"${defaultTarPath}"`, [ '--use-compress-program', 'zstd -d --long=30', @@ -72,7 +73,7 @@ test('gzip extract tar', async () => { expect(mkdirMock).toHaveBeenCalledWith(workspace) const tarPath = IS_WINDOWS ? `${process.env['windir']}\\System32\\tar.exe` - : 'tar' + : defaultTarPath expect(execMock).toHaveBeenCalledTimes(1) expect(execMock).toHaveBeenCalledWith( `"${tarPath}"`, @@ -125,7 +126,6 @@ test('zstd create tar', async () => { const archiveFolder = getTempDir() const workspace = process.env['GITHUB_WORKSPACE'] const sourceDirectories = ['~/.npm/cache', `${workspace}/dist`] - const tarPath = 'tar' await fs.promises.mkdir(archiveFolder, {recursive: true}) @@ -133,7 +133,7 @@ test('zstd create tar', async () => { expect(execMock).toHaveBeenCalledTimes(1) expect(execMock).toHaveBeenCalledWith( - `"${tarPath}"`, + `"${defaultTarPath}"`, [ '--posix', '--use-compress-program', @@ -165,7 +165,7 @@ test('gzip create tar', async () => { const tarPath = IS_WINDOWS ? `${process.env['windir']}\\System32\\tar.exe` - : 'tar' + : defaultTarPath expect(execMock).toHaveBeenCalledTimes(1) expect(execMock).toHaveBeenCalledWith( @@ -186,3 +186,73 @@ test('gzip create tar', async () => { } ) }) + +test('zstd list tar', async () => { + const execMock = jest.spyOn(exec, 'exec') + + const archivePath = IS_WINDOWS + ? 
`${process.env['windir']}\\fakepath\\cache.tar` + : 'cache.tar' + + await tar.listTar(archivePath, CompressionMethod.Zstd) + + expect(execMock).toHaveBeenCalledTimes(1) + expect(execMock).toHaveBeenCalledWith( + `"${defaultTarPath}"`, + [ + '--use-compress-program', + 'zstd -d --long=30', + '-tf', + IS_WINDOWS ? archivePath.replace(/\\/g, '/') : archivePath, + '-P' + ].concat(IS_WINDOWS ? ['--force-local'] : []), + {cwd: undefined} + ) +}) + +test('zstdWithoutLong list tar', async () => { + const execMock = jest.spyOn(exec, 'exec') + + const archivePath = IS_WINDOWS + ? `${process.env['windir']}\\fakepath\\cache.tar` + : 'cache.tar' + + await tar.listTar(archivePath, CompressionMethod.ZstdWithoutLong) + + expect(execMock).toHaveBeenCalledTimes(1) + expect(execMock).toHaveBeenCalledWith( + `"${defaultTarPath}"`, + [ + '--use-compress-program', + 'zstd -d', + '-tf', + IS_WINDOWS ? archivePath.replace(/\\/g, '/') : archivePath, + '-P' + ].concat(IS_WINDOWS ? ['--force-local'] : []), + {cwd: undefined} + ) +}) + +test('gzip list tar', async () => { + const execMock = jest.spyOn(exec, 'exec') + const archivePath = IS_WINDOWS + ? `${process.env['windir']}\\fakepath\\cache.tar` + : 'cache.tar' + + await tar.listTar(archivePath, CompressionMethod.Gzip) + + const tarPath = IS_WINDOWS + ? `${process.env['windir']}\\System32\\tar.exe` + : defaultTarPath + expect(execMock).toHaveBeenCalledTimes(1) + expect(execMock).toHaveBeenCalledWith( + `"${tarPath}"`, + [ + '-z', + '-tf', + IS_WINDOWS ? 
archivePath.replace(/\\/g, '/') : archivePath, + '-P' + ], + {cwd: undefined} + ) +}) diff --git a/packages/cache/package-lock.json b/packages/cache/package-lock.json index b9d44210d1..087376b030 100644 --- a/packages/cache/package-lock.json +++ b/packages/cache/package-lock.json @@ -1,6 +1,6 @@ { "name": "@actions/cache", - "version": "1.0.5", + "version": "1.0.6", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/packages/cache/package.json b/packages/cache/package.json index 05635e8f25..62dd2bea7e 100644 --- a/packages/cache/package.json +++ b/packages/cache/package.json @@ -1,6 +1,6 @@ { "name": "@actions/cache", - "version": "1.0.5", + "version": "1.0.6", "preview": true, "description": "Actions cache lib", "keywords": [ diff --git a/packages/cache/src/cache.ts b/packages/cache/src/cache.ts index 57ea1527bf..a6870f8f96 100644 --- a/packages/cache/src/cache.ts +++ b/packages/cache/src/cache.ts @@ -2,7 +2,7 @@ import * as core from '@actions/core' import * as path from 'path' import * as utils from './internal/cacheUtils' import * as cacheHttpClient from './internal/cacheHttpClient' -import {createTar, extractTar} from './internal/tar' +import {createTar, extractTar, listTar} from './internal/tar' import {DownloadOptions, UploadOptions} from './options' export class ValidationError extends Error { @@ -100,6 +100,10 @@ export async function restoreCache( options ) + if (core.isDebug()) { + await listTar(archivePath, compressionMethod) + } + const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath) core.info( `Cache Size: ~${Math.round( @@ -108,6 +112,7 @@ export async function restoreCache( ) await extractTar(archivePath, compressionMethod) + core.info('Cache restored successfully') } finally { // Try to delete the archive to save space try { @@ -162,6 +167,9 @@ export async function saveCache( core.debug(`Archive Path: ${archivePath}`) await createTar(archiveFolder, cachePaths, compressionMethod) + if (core.isDebug()) { + await 
listTar(archivePath, compressionMethod) + } const fileSizeLimit = 5 * 1024 * 1024 * 1024 // 5GB per repo limit const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath) diff --git a/packages/cache/src/internal/cacheHttpClient.ts b/packages/cache/src/internal/cacheHttpClient.ts index dab39115a2..000257a502 100644 --- a/packages/cache/src/internal/cacheHttpClient.ts +++ b/packages/cache/src/internal/cacheHttpClient.ts @@ -301,6 +301,10 @@ export async function saveCache( // Commit Cache core.debug('Commiting cache') const cacheSize = utils.getArchiveFileSizeIsBytes(archivePath) + core.info( + `Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)` + ) + const commitCacheResponse = await commitCache(httpClient, cacheId, cacheSize) if (!isSuccessStatusCode(commitCacheResponse.statusCode)) { throw new Error( diff --git a/packages/cache/src/internal/tar.ts b/packages/cache/src/internal/tar.ts index 680433e741..1b70dac09f 100644 --- a/packages/cache/src/internal/tar.ts +++ b/packages/cache/src/internal/tar.ts @@ -9,18 +9,29 @@ async function getTarPath( args: string[], compressionMethod: CompressionMethod ): Promise { - const IS_WINDOWS = process.platform === 'win32' - if (IS_WINDOWS) { - const systemTar = `${process.env['windir']}\\System32\\tar.exe` - if (compressionMethod !== CompressionMethod.Gzip) { - // We only use zstandard compression on windows when gnu tar is installed due to - // a bug with compressing large files with bsdtar + zstd - args.push('--force-local') - } else if (existsSync(systemTar)) { - return systemTar - } else if (await utils.isGnuTarInstalled()) { - args.push('--force-local') + switch (process.platform) { + case 'win32': { + const systemTar = `${process.env['windir']}\\System32\\tar.exe` + if (compressionMethod !== CompressionMethod.Gzip) { + // We only use zstandard compression on windows when gnu tar is installed due to + // a bug with compressing large files with bsdtar + zstd + args.push('--force-local') + } 
else if (existsSync(systemTar)) { + return systemTar + } else if (await utils.isGnuTarInstalled()) { + args.push('--force-local') + } + break } + case 'darwin': { + const gnuTar = await io.which('gtar', false) + if (gnuTar) { + return gnuTar + } + break + } + default: + break } return await io.which('tar', true) } @@ -113,3 +124,30 @@ export async function createTar( ] await execTar(args, compressionMethod, archiveFolder) } + +export async function listTar( + archivePath: string, + compressionMethod: CompressionMethod +): Promise { + // --d: Decompress. + // --long=#: Enables long distance matching with # bits. + // Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. + function getCompressionProgram(): string[] { + switch (compressionMethod) { + case CompressionMethod.Zstd: + return ['--use-compress-program', 'zstd -d --long=30'] + case CompressionMethod.ZstdWithoutLong: + return ['--use-compress-program', 'zstd -d'] + default: + return ['-z'] + } + } + const args = [ + ...getCompressionProgram(), + '-tf', + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P' + ] + await execTar(args, compressionMethod) +}