From 93763bf4ef2a5457615c5d7bd77ef154d4874b16 Mon Sep 17 00:00:00 2001 From: Thiyagu K Date: Thu, 4 Dec 2025 07:11:20 +0000 Subject: [PATCH 01/11] feat: Enable full object checksum validation on JSON path Adds validation for client-provided (pre-calculated) and client-calculated CRC32C and MD5 hashes when the final upload request is made via the JSON API path (status 200). This ensures consistency checks are performed even when the `Upload` stream is finalized, preventing silent data corruption if the server-reported hash (in the response body) mismatches the client's expected hash. --- src/hash-stream-validator.ts | 10 +++ src/resumable-upload.ts | 161 ++++++++++++++++++++++++++++++++++- 2 files changed, 167 insertions(+), 4 deletions(-) diff --git a/src/hash-stream-validator.ts b/src/hash-stream-validator.ts index 5f4a10e4e..cf2172fbe 100644 --- a/src/hash-stream-validator.ts +++ b/src/hash-stream-validator.ts @@ -81,6 +81,16 @@ class HashStreamValidator extends Transform { return this.#crc32cHash?.toString(); } + /** + * Return the calculated MD5 value, if available. + */ + get md5Digest(): string | undefined { + if (this.#md5Hash && !this.#md5Digest) { + this.#md5Digest = this.#md5Hash.digest('base64'); + } + return this.#md5Digest; + } + _flush(callback: (error?: Error | null | undefined) => void) { if (this.#md5Hash) { this.#md5Digest = this.#md5Hash.digest('base64'); diff --git a/src/resumable-upload.ts b/src/resumable-upload.ts index e6a51fd14..857e1b7c2 100644 --- a/src/resumable-upload.ts +++ b/src/resumable-upload.ts @@ -36,10 +36,11 @@ import { getUserAgentString, } from './util.js'; import {GCCL_GCS_CMD_KEY} from './nodejs-common/util.js'; -import {FileMetadata} from './file.js'; +import {FileExceptionMessages, FileMetadata, RequestError} from './file.js'; // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-ignore import {getPackageJSON} from './package-json-helper.cjs'; +import {HashStreamValidator} from './hash-stream-validator.js'; const NOT_FOUND_STATUS_CODE = 404; const RESUMABLE_INCOMPLETE_STATUS_CODE = 308; @@ -149,6 +150,19 @@ export interface UploadConfig extends Pick { */ isPartialUpload?: boolean; + clientCrc32c?: string; + clientMd5Hash?: string; + /** + * Enables CRC32C calculation on the client side. + * The calculated hash will be sent in the final PUT request if `clientCrc32c` is not provided. + */ + crc32c?: boolean; + /** + * Enables MD5 calculation on the client side. + * The calculated hash will be sent in the final PUT request if `clientMd5Hash` is not provided. + */ + md5?: boolean; + /** * A customer-supplied encryption key. See * https://cloud.google.com/storage/docs/encryption#customer-supplied. @@ -334,6 +348,11 @@ export class Upload extends Writable { */ private writeBuffers: Buffer[] = []; private numChunksReadInRequest = 0; + + #hashValidator?: HashStreamValidator; + #clientCrc32c?: string; + #clientMd5Hash?: string; + /** * An array of buffers used for caching the most recent upload chunk. * We should not assume that the server received all bytes sent in the request. @@ -428,6 +447,20 @@ export class Upload extends Writable { this.retryOptions = cfg.retryOptions; this.isPartialUpload = cfg.isPartialUpload ?? 
false; + this.#clientCrc32c = cfg.clientCrc32c; + this.#clientMd5Hash = cfg.clientMd5Hash; + + const calculateCrc32c = !cfg.clientCrc32c && cfg.crc32c; + const calculateMd5 = !cfg.clientMd5Hash && cfg.md5; + + if (calculateCrc32c || calculateMd5) { + this.#hashValidator = new HashStreamValidator({ + crc32c: calculateCrc32c, + md5: calculateMd5, + updateHashesOnly: true, + }); + } + if (cfg.key) { if (typeof cfg.key === 'string') { const base64Key = Buffer.from(cfg.key).toString('base64'); @@ -518,9 +551,19 @@ export class Upload extends Writable { // Backwards-compatible event this.emit('writing'); - this.writeBuffers.push( - typeof chunk === 'string' ? Buffer.from(chunk, encoding) : chunk - ); + const bufferChunk = + typeof chunk === 'string' ? Buffer.from(chunk, encoding) : chunk; + + if (this.#hashValidator) { + try { + this.#hashValidator.write(bufferChunk); + } catch (e) { + this.destroy(e as Error); + return; + } + } + + this.writeBuffers.push(bufferChunk); this.once('readFromChunkBuffer', readCallback); @@ -537,6 +580,36 @@ export class Upload extends Writable { this.localWriteCacheByteLength += buf.byteLength; } + /** + * Compares the client's calculated or provided hash against the server's + * returned hash for a specific checksum type. Destroys the stream on mismatch. + * @param clientHash The client's calculated or provided hash (Base64). + * @param serverHash The hash returned by the server (Base64). + * @param hashType The type of hash ('CRC32C' or 'MD5'). + */ + #validateChecksum( + clientHash: string | undefined, + serverHash: string | undefined, + hashType: 'CRC32C' | 'MD5' + ): boolean { + // Only validate if both client and server hashes are present. + if (clientHash && serverHash) { + if (clientHash !== serverHash) { + const detailMessage = `${hashType} checksum mismatch. Client calculated: ${clientHash}, Server returned: ${serverHash}`; + const code = 'FILE_NO_UPLOAD'; + const primaryMessage = FileExceptionMessages.UPLOAD_MISMATCH; + const detailError = new Error(detailMessage); + const error = new RequestError(primaryMessage); + error.code = code; + error.errors = [detailError]; + + this.destroy(error); + return true; + } + } + return false; + } + /** * Prepends the local buffer to write buffer and resets it. * @@ -929,6 +1002,10 @@ export class Upload extends Writable { // unshifting data back into the queue. This way we will know if this is the last request or not. 
const isLastChunkOfUpload = !(await this.waitForNextChunk()); + if (isLastChunkOfUpload && this.#hashValidator) { + this.#hashValidator.end(); + } + // Important: put the data back in the queue for the actual upload this.prependLocalBufferToUpstream(); @@ -951,8 +1028,66 @@ export class Upload extends Writable { headers['Content-Length'] = bytesToUpload; headers['Content-Range'] = `bytes ${this.offset}-${endingByte}/${totalObjectSize}`; + + // Apply X-Goog-Hash header ONLY on the final chunk (WriteObject call) + if (isLastChunkOfUpload) { + const checksums: string[] = []; + + if (this.#hashValidator) { + if (this.#hashValidator.crc32cEnabled) { + // CRC32C value is final after all updates + checksums.push(`crc32c=${this.#hashValidator.crc32c!}`); + } + if (this.#hashValidator.md5Enabled) { + // MD5 digest is either ready on _flush (if stream is ending) or can be accessed now via getter + checksums.push(`md5=${this.#hashValidator.md5Digest!}`); + } + } else { + if (this.#clientCrc32c) { + checksums.push(`crc32c=${this.#clientCrc32c}`); + } + if (this.#clientMd5Hash) { + checksums.push(`md5=${this.#clientMd5Hash}`); + } + } + + if (checksums.length > 0) { + headers['X-Goog-Hash'] = checksums.join(','); + } + } } else { headers['Content-Range'] = `bytes ${this.offset}-*/${this.contentLength}`; + + // In single chunk mode, if contentLength is set, the entire upload is the final chunk. + const isSingleFinalUpload = !!this.contentLength; + + if (isSingleFinalUpload && this.#hashValidator) { + this.#hashValidator.end(); + } + + if (isSingleFinalUpload) { + const checksums: string[] = []; + + if (this.#hashValidator) { + if (this.#hashValidator.crc32cEnabled) { + checksums.push(`crc32c=${this.#hashValidator.crc32c!}`); + } + if (this.#hashValidator.md5Enabled) { + checksums.push(`md5=${this.#hashValidator.md5Digest!}`); + } + } else { + if (this.#clientCrc32c) { + checksums.push(`crc32c=${this.#clientCrc32c}`); + } + if (this.#clientMd5Hash) { + checksums.push(`md5=${this.#clientMd5Hash}`); + } + } + + if (checksums.length > 0) { + headers['X-Goog-Hash'] = checksums.join(','); + } + } } const reqOpts: GaxiosOptions = { @@ -1047,6 +1182,24 @@ export class Upload extends Writable { this.destroy(err); } else { + const serverCrc32c = resp.data.crc32c; + const serverMd5 = resp.data.md5Hash; + + const clientCrc32cToValidate = + this.#hashValidator?.crc32c || this.#clientCrc32c; + const clientMd5HashToValidate = + this.#hashValidator?.md5Digest || this.#clientMd5Hash; + + if ( + this.#validateChecksum(clientCrc32cToValidate, serverCrc32c, 'CRC32C') + ) { + return; + } + + if (this.#validateChecksum(clientMd5HashToValidate, serverMd5, 'MD5')) { + return; + } + // no need to keep the cache this.#resetLocalBuffersCache(); From b81433ccf11151a0b94012762fcfa1e1bc204ce3 Mon Sep 17 00:00:00 2001 From: Thiyagu K Date: Thu, 4 Dec 2025 07:33:40 +0000 Subject: [PATCH 02/11] fix: duplicate codes removed --- src/hash-stream-validator.ts | 2 +- src/resumable-upload.ts | 77 ++++++++++++++++-------------------- 2 files changed, 34 insertions(+), 45 deletions(-) diff --git a/src/hash-stream-validator.ts b/src/hash-stream-validator.ts index cf2172fbe..0c3a2ca72 100644 --- a/src/hash-stream-validator.ts +++ b/src/hash-stream-validator.ts @@ -92,7 +92,7 @@ class HashStreamValidator extends Transform { } _flush(callback: (error?: Error | null | undefined) => void) { - if (this.#md5Hash) { + if (this.#md5Hash && !this.#md5Digest) { this.#md5Digest = this.#md5Hash.digest('base64'); } diff --git a/src/resumable-upload.ts 
b/src/resumable-upload.ts index 857e1b7c2..17b75db0a 100644 --- a/src/resumable-upload.ts +++ b/src/resumable-upload.ts @@ -610,6 +610,37 @@ export class Upload extends Writable { return false; } + /** + * Builds and applies the X-Goog-Hash header to the request options + * using either calculated hashes from #hashValidator or pre-calculated + * client-side hashes. This should only be called on the final request. + * + * @param headers The headers object to modify. + */ + #applyChecksumHeaders(headers: GaxiosOptions['headers']) { + const checksums: string[] = []; + + if (this.#hashValidator) { + if (this.#hashValidator.crc32cEnabled) { + checksums.push(`crc32c=${this.#hashValidator.crc32c!}`); + } + if (this.#hashValidator.md5Enabled) { + checksums.push(`md5=${this.#hashValidator.md5Digest!}`); + } + } else { + if (this.#clientCrc32c) { + checksums.push(`crc32c=${this.#clientCrc32c}`); + } + if (this.#clientMd5Hash) { + checksums.push(`md5=${this.#clientMd5Hash}`); + } + } + + if (checksums.length > 0) { + headers!['X-Goog-Hash'] = checksums.join(','); + } + } + /** * Prepends the local buffer to write buffer and resets it. * @@ -1031,29 +1062,7 @@ export class Upload extends Writable { // Apply X-Goog-Hash header ONLY on the final chunk (WriteObject call) if (isLastChunkOfUpload) { - const checksums: string[] = []; - - if (this.#hashValidator) { - if (this.#hashValidator.crc32cEnabled) { - // CRC32C value is final after all updates - checksums.push(`crc32c=${this.#hashValidator.crc32c!}`); - } - if (this.#hashValidator.md5Enabled) { - // MD5 digest is either ready on _flush (if stream is ending) or can be accessed now via getter - checksums.push(`md5=${this.#hashValidator.md5Digest!}`); - } - } else { - if (this.#clientCrc32c) { - checksums.push(`crc32c=${this.#clientCrc32c}`); - } - if (this.#clientMd5Hash) { - checksums.push(`md5=${this.#clientMd5Hash}`); - } - } - - if (checksums.length > 0) { - headers['X-Goog-Hash'] = checksums.join(','); - } + this.#applyChecksumHeaders(headers); } } else { headers['Content-Range'] = `bytes ${this.offset}-*/${this.contentLength}`; @@ -1066,27 +1075,7 @@ export class Upload extends Writable { } if (isSingleFinalUpload) { - const checksums: string[] = []; - - if (this.#hashValidator) { - if (this.#hashValidator.crc32cEnabled) { - checksums.push(`crc32c=${this.#hashValidator.crc32c!}`); - } - if (this.#hashValidator.md5Enabled) { - checksums.push(`md5=${this.#hashValidator.md5Digest!}`); - } - } else { - if (this.#clientCrc32c) { - checksums.push(`crc32c=${this.#clientCrc32c}`); - } - if (this.#clientMd5Hash) { - checksums.push(`md5=${this.#clientMd5Hash}`); - } - } - - if (checksums.length > 0) { - headers['X-Goog-Hash'] = checksums.join(','); - } + this.#applyChecksumHeaders(headers); } } From 38fbb8011f0ee73178638374448cf797b470a231 Mon Sep 17 00:00:00 2001 From: Thiyagu K Date: Thu, 4 Dec 2025 10:50:16 +0000 Subject: [PATCH 03/11] added unit test cases --- test/resumable-upload.ts | 392 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 392 insertions(+) diff --git a/test/resumable-upload.ts b/test/resumable-upload.ts index ab0973aba..94ce2da80 100644 --- a/test/resumable-upload.ts +++ b/test/resumable-upload.ts @@ -33,10 +33,12 @@ import { ApiError, CreateUriCallback, PROTOCOL_REGEX, + UploadConfig, } from '../src/resumable-upload.js'; import {GaxiosOptions, GaxiosError, GaxiosResponse} from 'gaxios'; import {GCCL_GCS_CMD_KEY} from '../src/nodejs-common/util.js'; import {getDirName} from '../src/util.js'; +import {FileExceptionMessages} from 
'../src/file.js'; nock.disableNetConnect(); @@ -55,6 +57,8 @@ const queryPath = '/?userProject=user-project-id'; const X_GOOG_API_HEADER_REGEX = /^gl-node\/(?[^W]+) gccl\/(?[^W]+) gccl-invocation-id\/(?[^W]+) gccl-gcs-cmd\/(?[^W]+)$/; const USER_AGENT_REGEX = /^gcloud-node-storage\/(?[^W]+)$/; +const CORRECT_CLIENT_CRC32C = 'Q2hlY2tzdW0h'; +const INCORRECT_SERVER_CRC32C = 'Q2hlY2tzdVUa'; function mockAuthorizeRequest( code = 200, @@ -1286,6 +1290,270 @@ describe('resumable-upload', () => { }); }); }); + + describe('X-Goog-Hash header injection', () => { + const CALCULATED_CRC32C = 'bzKmHw=='; + const CALCULATED_MD5 = 'VpBzljOcorCZvRIkX5Nt3A=='; + const DUMMY_CONTENT = Buffer.alloc(512, 'a'); + const CHUNK_SIZE = 256; + + let requestCount: number; + + /** + * Creates a mocked HashValidator object with forced getters to return + * predefined hash values, bypassing internal stream calculation logic. + */ + function createMockHashValidator( + crc32cEnabled: boolean, + md5Enabled: boolean + ) { + const mockValidator = { + crc32cEnabled: crc32cEnabled, + md5Enabled: md5Enabled, + end: () => {}, // Mock the end method + write: () => {}, + }; + + Object.defineProperty(mockValidator, 'crc32c', { + get: () => CALCULATED_CRC32C, + configurable: true, + }); + Object.defineProperty(mockValidator, 'md5Digest', { + get: () => CALCULATED_MD5, + configurable: true, + }); + return mockValidator; + } + + const MOCK_AUTH_CLIENT = { + // Mock the request method to return a dummy response + request: async (opts: GaxiosOptions) => { + return { + status: 200, + data: {}, + headers: {}, + config: opts, + statusText: 'OK', + } as GaxiosResponse; + }, + // Mock getRequestHeaders, which is often called before request + getRequestHeaders: async () => ({}), + // Mock getRequestMetadata, which is also used for token retrieval + getRequestMetadata: async () => ({}), + // Mock getRequestMetadataAsync + getRequestMetadataAsync: async () => ({}), + // Mock getClient, as the library might call this internally + getClient: async () => MOCK_AUTH_CLIENT, + // Add any other properties your code uses (e.g., json, credentials) + }; + + /** + * Sets up the `up` instance for hash injection tests. + * @param configOptions Partial UploadConfig to apply. 
+ */ + function setupHashUploadInstance( + configOptions: Partial & {crc32c?: boolean; md5?: boolean} + ) { + up = upload({ + bucket: BUCKET, + file: FILE, + authClient: MOCK_AUTH_CLIENT, + retryOptions: {...RETRY_OPTIONS, maxRetries: 0}, + metadata: { + contentLength: DUMMY_CONTENT.byteLength, + contentType: 'text/plain', + }, + ...configOptions, + }); + + // Manually inject the mock HashStreamValidator if needed + const calculateCrc32c = + !configOptions.clientCrc32c && configOptions.crc32c; + const calculateMd5 = !configOptions.clientMd5Hash && configOptions.md5; + + if (calculateCrc32c || calculateMd5) { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (up as any)['#hashValidator'] = createMockHashValidator( + !!calculateCrc32c, + !!calculateMd5 + ); + } + } + + async function performUpload( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + uploadInstance: any, + data: Buffer, + isMultiChunk: boolean, + expectedCrc32c?: string, + expectedMd5?: string + ): Promise { + const capturedReqOpts: GaxiosOptions[] = []; + requestCount = 0; + + uploadInstance.makeRequestStream = async ( + requestOptions: GaxiosOptions + ) => { + requestCount++; + capturedReqOpts.push(requestOptions); + + await new Promise(resolve => { + requestOptions.body.on('data', () => {}); + requestOptions.body.on('end', resolve); + }); + + const serverCrc32c = expectedCrc32c || CALCULATED_CRC32C; + const serverMd5 = expectedMd5 || CALCULATED_MD5; + if ( + isMultiChunk && + requestCount < Math.ceil(DUMMY_CONTENT.byteLength / CHUNK_SIZE) + ) { + const lastByteReceived = requestCount * CHUNK_SIZE - 1; + return { + data: '', + status: RESUMABLE_INCOMPLETE_STATUS_CODE, + headers: {range: `bytes=0-${lastByteReceived}`}, + // eslint-disable-next-line @typescript-eslint/no-explicit-any + } as any; + } else { + return { + status: 200, + data: { + crc32c: serverCrc32c, + md5Hash: serverMd5, + name: FILE, + bucket: BUCKET, + size: DUMMY_CONTENT.byteLength.toString(), + }, + headers: {}, + config: {}, + statusText: 'OK', + // eslint-disable-next-line @typescript-eslint/no-explicit-any + } as any; + } + }; + + return new Promise((resolve, reject) => { + uploadInstance.on('error', reject); + uploadInstance.on('uploadFinished', () => { + resolve(capturedReqOpts); + }); + + const upstreamBuffer = new Readable({ + read() { + this.push(data); + this.push(null); + }, + }); + upstreamBuffer.pipe(uploadInstance); + }); + } + + describe('single chunk', () => { + it('should include X-Goog-Hash header with crc32c when crc32c is enabled (via validator)', async () => { + setupHashUploadInstance({crc32c: true}); + const reqOpts = await performUpload(up, DUMMY_CONTENT, false); + assert.strictEqual(reqOpts.length, 1); + assert.equal( + reqOpts[0].headers!['X-Goog-Hash'], + `crc32c=${CALCULATED_CRC32C}` + ); + }); + + it('should include X-Goog-Hash header with md5 when md5 is enabled (via validator)', async () => { + setupHashUploadInstance({md5: true}); + const reqOpts = await performUpload(up, DUMMY_CONTENT, false); + assert.strictEqual(reqOpts.length, 1); + assert.equal( + reqOpts[0].headers!['X-Goog-Hash'], + `md5=${CALCULATED_MD5}` + ); + }); + + it('should include both crc32c and md5 in X-Goog-Hash when both are enabled (via validator)', async () => { + setupHashUploadInstance({crc32c: true, md5: true}); + const reqOpts = await performUpload(up, DUMMY_CONTENT, false); + assert.strictEqual(reqOpts.length, 1); + const xGoogHash = reqOpts[0].headers!['X-Goog-Hash']; + assert.ok(xGoogHash); + const expectedHashes = 
[ + `crc32c=${CALCULATED_CRC32C}`, + `md5=${CALCULATED_MD5}`, + ]; + const actualHashes = xGoogHash + .split(',') + .map((s: string) => s.trim()); + assert.deepStrictEqual(actualHashes.sort(), expectedHashes.sort()); + }); + + it('should use clientCrc32c if provided (pre-calculated hash)', async () => { + const customCrc32c = 'CUSTOMCRC'; + setupHashUploadInstance({crc32c: true, clientCrc32c: customCrc32c}); + const reqOpts = await performUpload( + up, + DUMMY_CONTENT, + false, + customCrc32c + ); + assert.strictEqual(reqOpts.length, 1); + assert.strictEqual( + reqOpts[0].headers!['X-Goog-Hash'], + `crc32c=${customCrc32c}` + ); + }); + + it('should use clientMd5Hash if provided (pre-calculated hash)', async () => { + const customMd5 = 'CUSTOMMD5'; + setupHashUploadInstance({md5: true, clientMd5Hash: customMd5}); + const reqOpts = await performUpload( + up, + DUMMY_CONTENT, + false, + undefined, + customMd5 + ); + assert.strictEqual(reqOpts.length, 1); + assert.strictEqual( + reqOpts[0].headers!['X-Goog-Hash'], + `md5=${customMd5}` + ); + }); + + it('should not include X-Goog-Hash if neither crc32c nor md5 are enabled', async () => { + setupHashUploadInstance({}); + const reqOpts = await performUpload(up, DUMMY_CONTENT, false); + assert.strictEqual(reqOpts.length, 1); + assert.strictEqual(reqOpts[0].headers!['X-Goog-Hash'], undefined); + }); + }); + + describe('multiple chunk', () => { + beforeEach(() => { + setupHashUploadInstance({ + crc32c: true, + md5: true, + chunkSize: CHUNK_SIZE, + }); + }); + + it('should NOT include X-Goog-Hash header on intermediate multi-chunk requests', async () => { + const reqOpts = await performUpload(up, DUMMY_CONTENT, true); + assert.strictEqual(reqOpts.length, 2); + + assert.strictEqual(reqOpts[0].headers!['Content-Length'], CHUNK_SIZE); + assert.strictEqual(reqOpts[0].headers!['X-Goog-Hash'], undefined); + }); + + it('should include X-Goog-Hash header ONLY on the final multi-chunk request', async () => { + const expectedHashHeader = `crc32c=${CALCULATED_CRC32C},md5=${CALCULATED_MD5}`; + const reqOpts = await performUpload(up, DUMMY_CONTENT, true); + assert.strictEqual(reqOpts.length, 2); + + assert.strictEqual(reqOpts[1].headers!['Content-Length'], CHUNK_SIZE); + assert.equal(reqOpts[1].headers!['X-Goog-Hash'], expectedHashHeader); + }); + }); + }); }); describe('#responseHandler', () => { @@ -1340,6 +1608,63 @@ describe('resumable-upload', () => { up.responseHandler(RESP); }); + it('should destroy the stream on CRC32C checksum mismatch', done => { + const CLIENT_CRC = 'client_hash'; + const SERVER_CRC = 'server_hash'; + const RESP = { + data: { + crc32c: SERVER_CRC, + md5Hash: 'md5_match', + size: '100', + }, + status: 200, + }; + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (up as any)['#hashValidator'] = { + crc32cEnabled: true, + md5Enabled: true, + crc32c: CLIENT_CRC, + md5Digest: 'md5_match', + }; + up.upstreamEnded = true; + + up.destroy = (err: Error) => { + assert.strictEqual(err.message, FileExceptionMessages.UPLOAD_MISMATCH); + done(); + }; + + up.responseHandler(RESP); + }); + + it('should destroy the stream on MD5 checksum mismatch', done => { + const CLIENT_MD5 = 'client_md5'; + const SERVER_MD5 = 'server_md5'; + const RESP = { + data: { + crc32c: 'crc32c_match', + md5Hash: SERVER_MD5, + size: '100', + }, + status: 200, + }; + + up.md5 = true; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (up as any)['#hashValidator'] = { + crc32c: 'crc32c_match', + md5Digest: CLIENT_MD5, + }; + up.upstreamEnded = 
true; + + up.destroy = (err: Error) => { + assert.strictEqual(err.message, FileExceptionMessages.UPLOAD_MISMATCH); + done(); + }; + + up.responseHandler(RESP); + }); + it('should continue with multi-chunk upload when incomplete', done => { const lastByteReceived = 9; @@ -2699,4 +3024,71 @@ describe('resumable-upload', () => { }); }); }); + + describe('Explicit Client-Side Checksum Validation', () => { + const DUMMY_CONTENT = Buffer.alloc(CHUNK_SIZE_MULTIPLE * 2); + let URI = ''; + beforeEach(() => { + up.contentLength = DUMMY_CONTENT.byteLength; + up.clientCrc32c = CORRECT_CLIENT_CRC32C; + URI = 'uri'; + up.createURI = (callback: (error: Error | null, uri: string) => void) => { + up.uri = URI; + up.offset = 0; + callback(null, URI); + }; + }); + + it('should fail and destroy the stream if server-reported CRC32C mismatches client CRC32C', done => { + const EXPECTED_ERROR_MESSAGE_PART = 'CRC32C checksum mismatch.'; + + up.makeRequestStream = async (opts: GaxiosOptions) => { + // eslint-disable-next-line @typescript-eslint/no-unused-vars + let dataReceived = 0; + + await new Promise(resolve => { + opts.body.on('data', (data: Buffer) => { + dataReceived += data.byteLength; + }); + opts.body.on('end', resolve); + }); + + return { + status: 200, + data: { + crc32c: INCORRECT_SERVER_CRC32C, + name: up.file, + bucket: up.bucket, + }, + }; + }; + + up.on('error', (err: Error) => { + assert.ok( + err.message.includes(EXPECTED_ERROR_MESSAGE_PART), + 'Error message must indicate CRC32C mismatch.' + ); + assert.strictEqual(up.uri, URI, 'URI should have been set.'); + + done(); + }); + + up.on('finish', () => { + done( + new Error( + 'Upload should have failed due to checksum mismatch, but succeeded.' + ) + ); + }); + + const upstreamBuffer = new Readable({ + read() { + this.push(DUMMY_CONTENT); + this.push(null); + }, + }); + + upstreamBuffer.pipe(up); + }); + }); }); From 11358127a3c0aa53f0921223fd02e426cfb1c04e Mon Sep 17 00:00:00 2001 From: Thiyagu K Date: Thu, 4 Dec 2025 12:40:27 +0000 Subject: [PATCH 04/11] fix: Resolve checksum validation failures and add full test suite Adds the 'Validation of Client Checksums Against Server Response' test suite. Fixes test failures in client-provided hash scenarios by updating mock responses to ensure server-reported checksums match the client's expected values. --- src/resumable-upload.ts | 21 +++++++++++++-------- test/resumable-upload.ts | 35 +++++++++++++++++------------------ 2 files changed, 30 insertions(+), 26 deletions(-) diff --git a/src/resumable-upload.ts b/src/resumable-upload.ts index 17b75db0a..d248ab5d5 100644 --- a/src/resumable-upload.ts +++ b/src/resumable-upload.ts @@ -1068,7 +1068,7 @@ export class Upload extends Writable { headers['Content-Range'] = `bytes ${this.offset}-*/${this.contentLength}`; // In single chunk mode, if contentLength is set, the entire upload is the final chunk. 
- const isSingleFinalUpload = !!this.contentLength; + const isSingleFinalUpload = typeof this.contentLength === 'number'; if (isSingleFinalUpload && this.#hashValidator) { this.#hashValidator.end(); @@ -1170,7 +1170,7 @@ export class Upload extends Writable { } this.destroy(err); - } else { + } else if (this.isSuccessfulResponse(resp.status)) { const serverCrc32c = resp.data.crc32c; const serverMd5 = resp.data.md5Hash; @@ -1178,17 +1178,17 @@ export class Upload extends Writable { this.#hashValidator?.crc32c || this.#clientCrc32c; const clientMd5HashToValidate = this.#hashValidator?.md5Digest || this.#clientMd5Hash; - if ( - this.#validateChecksum(clientCrc32cToValidate, serverCrc32c, 'CRC32C') + this.#validateChecksum( + clientCrc32cToValidate, + serverCrc32c, + 'CRC32C' + ) || + this.#validateChecksum(clientMd5HashToValidate, serverMd5, 'MD5') ) { return; } - if (this.#validateChecksum(clientMd5HashToValidate, serverMd5, 'MD5')) { - return; - } - // no need to keep the cache this.#resetLocalBuffersCache(); @@ -1200,6 +1200,11 @@ export class Upload extends Writable { // Allow the object (Upload) to continue naturally so the user's // "finish" event fires. this.emit('uploadFinished'); + } else { + // Handles the case where shouldContinueUploadInAnotherRequest is true + // and the response is not successful (e.g., 308 for a partial upload). + // This is the expected behavior for partial uploads that have finished their chunk. + this.emit('uploadFinished'); } } diff --git a/test/resumable-upload.ts b/test/resumable-upload.ts index 94ce2da80..f035551ca 100644 --- a/test/resumable-upload.ts +++ b/test/resumable-upload.ts @@ -121,6 +121,7 @@ describe('resumable-upload', () => { apiEndpoint: API_ENDPOINT, retryOptions: {...RETRY_OPTIONS}, [GCCL_GCS_CMD_KEY]: 'sample.command', + clientCrc32c: CORRECT_CLIENT_CRC32C, }); }); @@ -1336,15 +1337,10 @@ describe('resumable-upload', () => { statusText: 'OK', } as GaxiosResponse; }, - // Mock getRequestHeaders, which is often called before request getRequestHeaders: async () => ({}), - // Mock getRequestMetadata, which is also used for token retrieval getRequestMetadata: async () => ({}), - // Mock getRequestMetadataAsync getRequestMetadataAsync: async () => ({}), - // Mock getClient, as the library might call this internally getClient: async () => MOCK_AUTH_CLIENT, - // Add any other properties your code uses (e.g., json, credentials) }; /** @@ -3025,7 +3021,7 @@ describe('resumable-upload', () => { }); }); - describe('Explicit Client-Side Checksum Validation', () => { + describe('Validation of Client Checksums Against Server Response', () => { const DUMMY_CONTENT = Buffer.alloc(CHUNK_SIZE_MULTIPLE * 2); let URI = ''; beforeEach(() => { @@ -3043,13 +3039,8 @@ describe('resumable-upload', () => { const EXPECTED_ERROR_MESSAGE_PART = 'CRC32C checksum mismatch.'; up.makeRequestStream = async (opts: GaxiosOptions) => { - // eslint-disable-next-line @typescript-eslint/no-unused-vars - let dataReceived = 0; - await new Promise(resolve => { - opts.body.on('data', (data: Buffer) => { - dataReceived += data.byteLength; - }); + opts.body.on('data', () => {}); opts.body.on('end', resolve); }); @@ -3059,35 +3050,43 @@ describe('resumable-upload', () => { crc32c: INCORRECT_SERVER_CRC32C, name: up.file, bucket: up.bucket, + size: DUMMY_CONTENT.byteLength.toString(), }, + headers: {}, + config: opts, + statusText: 'OK', }; }; + // Expect an error to be emitted. 
up.on('error', (err: Error) => { + assert.strictEqual(err.message, FileExceptionMessages.UPLOAD_MISMATCH); assert.ok( - err.message.includes(EXPECTED_ERROR_MESSAGE_PART), - 'Error message must indicate CRC32C mismatch.' + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (err as any).errors && + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (err as any).errors[0].message.includes(EXPECTED_ERROR_MESSAGE_PART) ); - assert.strictEqual(up.uri, URI, 'URI should have been set.'); - + assert.strictEqual(up.uri, 'uri'); done(); }); + // Ensure the 'finish' event is NOT called. up.on('finish', () => { done( new Error( - 'Upload should have failed due to checksum mismatch, but succeeded.' + 'Upload should have failed due to checksum mismatch, but emitted finish.' ) ); }); + // Start the upload by piping data to the new instance. const upstreamBuffer = new Readable({ read() { this.push(DUMMY_CONTENT); this.push(null); }, }); - upstreamBuffer.pipe(up); }); }); From c2395516948f0a7f9f4ccbbfb6ae385de2e12520 Mon Sep 17 00:00:00 2001 From: Thiyagu K Date: Thu, 4 Dec 2025 12:49:09 +0000 Subject: [PATCH 05/11] code refactor --- src/resumable-upload.ts | 31 +++++++++++++------------------ 1 file changed, 13 insertions(+), 18 deletions(-) diff --git a/src/resumable-upload.ts b/src/resumable-upload.ts index d248ab5d5..085f0c841 100644 --- a/src/resumable-upload.ts +++ b/src/resumable-upload.ts @@ -620,20 +620,16 @@ export class Upload extends Writable { #applyChecksumHeaders(headers: GaxiosOptions['headers']) { const checksums: string[] = []; - if (this.#hashValidator) { - if (this.#hashValidator.crc32cEnabled) { - checksums.push(`crc32c=${this.#hashValidator.crc32c!}`); - } - if (this.#hashValidator.md5Enabled) { - checksums.push(`md5=${this.#hashValidator.md5Digest!}`); - } - } else { - if (this.#clientCrc32c) { - checksums.push(`crc32c=${this.#clientCrc32c}`); - } - if (this.#clientMd5Hash) { - checksums.push(`md5=${this.#clientMd5Hash}`); - } + if (this.#hashValidator?.crc32cEnabled) { + checksums.push(`crc32c=${this.#hashValidator.crc32c!}`); + } else if (this.#clientCrc32c) { + checksums.push(`crc32c=${this.#clientCrc32c}`); + } + + if (this.#hashValidator?.md5Enabled) { + checksums.push(`md5=${this.#hashValidator.md5Digest!}`); + } else if (this.#clientMd5Hash) { + checksums.push(`md5=${this.#clientMd5Hash}`); } if (checksums.length > 0) { @@ -1070,11 +1066,10 @@ export class Upload extends Writable { // In single chunk mode, if contentLength is set, the entire upload is the final chunk. const isSingleFinalUpload = typeof this.contentLength === 'number'; - if (isSingleFinalUpload && this.#hashValidator) { - this.#hashValidator.end(); - } - if (isSingleFinalUpload) { + if (this.#hashValidator) { + this.#hashValidator.end(); + } this.#applyChecksumHeaders(headers); } } From 6eb20366dd53ccdf3eb14b8d6d2e58deff8dfa90 Mon Sep 17 00:00:00 2001 From: Thiyagu K Date: Thu, 4 Dec 2025 13:20:23 +0000 Subject: [PATCH 06/11] refactor: Combine and parameterize checksum validation tests Refactors four duplicate test cases (CRC32C/MD5 success and failure) into a single, parameterized test block within the 'Validation of Client Checksums Against Server Response' suite. This improves test clarity and reduces code duplication by dynamically generating test scenarios for post-upload hash validation. 
--- test/resumable-upload.ts | 141 ++++++++++++++++++++++++++------------- 1 file changed, 93 insertions(+), 48 deletions(-) diff --git a/test/resumable-upload.ts b/test/resumable-upload.ts index f035551ca..381044d64 100644 --- a/test/resumable-upload.ts +++ b/test/resumable-upload.ts @@ -59,6 +59,8 @@ const X_GOOG_API_HEADER_REGEX = const USER_AGENT_REGEX = /^gcloud-node-storage\/(?[^W]+)$/; const CORRECT_CLIENT_CRC32C = 'Q2hlY2tzdW0h'; const INCORRECT_SERVER_CRC32C = 'Q2hlY2tzdVUa'; +const CORRECT_CLIENT_MD5 = 'CorrectMD5Hash'; +const INCORRECT_SERVER_MD5 = 'IncorrectMD5Hash'; function mockAuthorizeRequest( code = 200, @@ -122,6 +124,7 @@ describe('resumable-upload', () => { retryOptions: {...RETRY_OPTIONS}, [GCCL_GCS_CMD_KEY]: 'sample.command', clientCrc32c: CORRECT_CLIENT_CRC32C, + clientMd5Hash: CORRECT_CLIENT_MD5, }); }); @@ -3026,7 +3029,6 @@ describe('resumable-upload', () => { let URI = ''; beforeEach(() => { up.contentLength = DUMMY_CONTENT.byteLength; - up.clientCrc32c = CORRECT_CLIENT_CRC32C; URI = 'uri'; up.createURI = (callback: (error: Error | null, uri: string) => void) => { up.uri = URI; @@ -3034,60 +3036,103 @@ describe('resumable-upload', () => { callback(null, URI); }; }); + const checksumScenarios = [ + { + type: 'CRC32C', + match: true, + desc: 'successfully finish the upload if server-reported CRC32C matches client CRC32C', + serverCrc: CORRECT_CLIENT_CRC32C, + serverMd5: CORRECT_CLIENT_MD5, + }, + { + type: 'CRC32C', + match: false, + desc: 'fail and destroy the stream if server-reported CRC32C mismatches client CRC32C', + serverCrc: INCORRECT_SERVER_CRC32C, + serverMd5: CORRECT_CLIENT_MD5, + errorPart: 'CRC32C checksum mismatch.', + }, + { + type: 'MD5', + match: true, + desc: 'successfully finish the upload if server-reported MD5 matches client MD5', + serverCrc: CORRECT_CLIENT_CRC32C, + serverMd5: CORRECT_CLIENT_MD5, + }, + { + type: 'MD5', + match: false, + desc: 'fail and destroy the stream if server-reported MD5 mismatches client MD5', + serverCrc: CORRECT_CLIENT_CRC32C, + serverMd5: INCORRECT_SERVER_MD5, + errorPart: 'MD5 checksum mismatch.', + }, + ]; + + checksumScenarios.forEach(scenario => { + it(`should ${scenario.desc}`, done => { + up.makeRequestStream = async (opts: GaxiosOptions) => { + await new Promise(resolve => { + opts.body.on('data', () => {}); + opts.body.on('end', resolve); + }); - it('should fail and destroy the stream if server-reported CRC32C mismatches client CRC32C', done => { - const EXPECTED_ERROR_MESSAGE_PART = 'CRC32C checksum mismatch.'; - - up.makeRequestStream = async (opts: GaxiosOptions) => { - await new Promise(resolve => { - opts.body.on('data', () => {}); - opts.body.on('end', resolve); - }); - - return { - status: 200, - data: { - crc32c: INCORRECT_SERVER_CRC32C, - name: up.file, - bucket: up.bucket, - size: DUMMY_CONTENT.byteLength.toString(), - }, - headers: {}, - config: opts, - statusText: 'OK', + return { + status: 200, + data: { + crc32c: scenario.serverCrc, + md5Hash: scenario.serverMd5, + name: up.file, + bucket: up.bucket, + size: DUMMY_CONTENT.byteLength.toString(), + }, + headers: {}, + config: opts, + statusText: 'OK', + }; }; - }; - // Expect an error to be emitted. 
- up.on('error', (err: Error) => { - assert.strictEqual(err.message, FileExceptionMessages.UPLOAD_MISMATCH); - assert.ok( - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (err as any).errors && + if (scenario.match) { + up.on('error', (err: Error) => { + done(new Error(`Upload failed unexpectedly: ${err.message}`)); + }); + up.on('finish', () => { + done(); + }); + } else { + up.on('error', (err: Error) => { + assert.strictEqual( + err.message, + FileExceptionMessages.UPLOAD_MISMATCH + ); + // eslint-disable-next-line @typescript-eslint/no-explicit-any - (err as any).errors[0].message.includes(EXPECTED_ERROR_MESSAGE_PART) - ); - assert.strictEqual(up.uri, 'uri'); - done(); - }); + const detailError = (err as any).errors && (err as any).errors[0]; + assert.ok( + detailError && detailError.message.includes(scenario.errorPart!), + `Error message should contain: ${scenario.errorPart}` + ); + assert.strictEqual(up.uri, URI); + done(); + }); - // Ensure the 'finish' event is NOT called. - up.on('finish', () => { - done( - new Error( - 'Upload should have failed due to checksum mismatch, but emitted finish.' - ) - ); - }); + up.on('finish', () => { + done( + new Error( + `Upload should have failed due to ${scenario.type} mismatch, but emitted finish.` + ) + ); + }); + } - // Start the upload by piping data to the new instance. - const upstreamBuffer = new Readable({ - read() { - this.push(DUMMY_CONTENT); - this.push(null); - }, + const upstreamBuffer = new Readable({ + read() { + this.push(DUMMY_CONTENT); + this.push(null); + }, + }); + upstreamBuffer.pipe(up); }); - upstreamBuffer.pipe(up); }); }); }); From deabdfc5ec1f2d8b8576efe0b92b4f16fd3f236f Mon Sep 17 00:00:00 2001 From: Thiyagu K Date: Thu, 4 Dec 2025 14:37:55 +0000 Subject: [PATCH 07/11] addressing comments --- src/hash-stream-validator.ts | 2 +- src/resumable-upload.ts | 6 +++++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/src/hash-stream-validator.ts b/src/hash-stream-validator.ts index 0c3a2ca72..0c6558ca9 100644 --- a/src/hash-stream-validator.ts +++ b/src/hash-stream-validator.ts @@ -93,7 +93,7 @@ class HashStreamValidator extends Transform { _flush(callback: (error?: Error | null | undefined) => void) { if (this.#md5Hash && !this.#md5Digest) { - this.#md5Digest = this.#md5Hash.digest('base64'); + void this.#md5Digest; } if (this.updateHashesOnly) { diff --git a/src/resumable-upload.ts b/src/resumable-upload.ts index 085f0c841..f33a8c1cf 100644 --- a/src/resumable-upload.ts +++ b/src/resumable-upload.ts @@ -1066,7 +1066,7 @@ export class Upload extends Writable { // In single chunk mode, if contentLength is set, the entire upload is the final chunk. 
const isSingleFinalUpload = typeof this.contentLength === 'number'; - if (isSingleFinalUpload) { + if (isSingleFinalUpload && this.upstreamEnded) { if (this.#hashValidator) { this.#hashValidator.end(); } @@ -1169,6 +1169,10 @@ export class Upload extends Writable { const serverCrc32c = resp.data.crc32c; const serverMd5 = resp.data.md5Hash; + if (this.#hashValidator) { + this.#hashValidator.end(); + } + const clientCrc32cToValidate = this.#hashValidator?.crc32c || this.#clientCrc32c; const clientMd5HashToValidate = From 4c7cab5504359b641c2f28cc49d0f3ffc3624eea Mon Sep 17 00:00:00 2001 From: Thiyagu K Date: Thu, 4 Dec 2025 14:49:26 +0000 Subject: [PATCH 08/11] bug fix --- src/hash-stream-validator.ts | 2 +- src/resumable-upload.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/hash-stream-validator.ts b/src/hash-stream-validator.ts index 0c6558ca9..0c3a2ca72 100644 --- a/src/hash-stream-validator.ts +++ b/src/hash-stream-validator.ts @@ -93,7 +93,7 @@ class HashStreamValidator extends Transform { _flush(callback: (error?: Error | null | undefined) => void) { if (this.#md5Hash && !this.#md5Digest) { - void this.#md5Digest; + this.#md5Digest = this.#md5Hash.digest('base64'); } if (this.updateHashesOnly) { diff --git a/src/resumable-upload.ts b/src/resumable-upload.ts index f33a8c1cf..222dc959d 100644 --- a/src/resumable-upload.ts +++ b/src/resumable-upload.ts @@ -1066,7 +1066,7 @@ export class Upload extends Writable { // In single chunk mode, if contentLength is set, the entire upload is the final chunk. const isSingleFinalUpload = typeof this.contentLength === 'number'; - if (isSingleFinalUpload && this.upstreamEnded) { + if (isSingleFinalUpload) { if (this.#hashValidator) { this.#hashValidator.end(); } From 47aafac4112a0dc79c203a06ef7128297eb4938e Mon Sep 17 00:00:00 2001 From: Thiyagu K Date: Mon, 8 Dec 2025 09:46:53 +0000 Subject: [PATCH 09/11] added system test --- system-test/kitchen.ts | 81 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 81 insertions(+) diff --git a/system-test/kitchen.ts b/system-test/kitchen.ts index fbdd139d1..bcdee3614 100644 --- a/system-test/kitchen.ts +++ b/system-test/kitchen.ts @@ -305,4 +305,85 @@ describe('resumable-upload', () => { ); assert.equal(results.size, FILE_SIZE); }); + + const INTEGRITY_FILE_SIZE = 1024 * 512; + const KNOWN_CRC32C_OF_ZEROS = 'rthIWA=='; + describe('Validation of Client Checksums Against Server Response (Integration)', () => { + let integrityFilePath: string; + let integrityFileBuffer: Buffer; + + before(async () => { + integrityFilePath = path.join(os.tmpdir(), '512KB_rand.dat'); + integrityFileBuffer = crypto.randomBytes(INTEGRITY_FILE_SIZE); + await fs.promises.writeFile(integrityFilePath, integrityFileBuffer); + }); + + after(async () => { + await fs.promises.rm(integrityFilePath, {force: true}); + }); + + it('should upload successfully when crc32c calculation is enabled', done => { + let uploadSucceeded = false; + + fs.createReadStream(integrityFilePath) + .on('error', done) + .pipe( + upload({ + bucket: bucketName, + file: integrityFilePath, + crc32c: true, + retryOptions: retryOptions, + userProject: 'storage-sdk-vendor', + }) + ) + .on('error', err => { + done(new Error(`Upload failed unexpectedly on success path: ${err}`)); + }) + .on('response', resp => { + uploadSucceeded = resp.status === 200; + }) + .on('finish', () => { + assert.strictEqual(uploadSucceeded, true); + done(); + }); + }); + + it('should destroy the stream on a checksum mismatch (client-provided hash mismatch)', done 
=> { + const EXPECTED_ERROR_MESSAGE_PART = 'checksum mismatch'; + + fs.createReadStream(integrityFilePath) + .on('error', done) + .pipe( + upload({ + bucket: bucketName, + file: integrityFilePath, + // ⚠️ Force a mismatch by providing a known incorrect hash for the file content + clientCrc32c: KNOWN_CRC32C_OF_ZEROS, + retryOptions: retryOptions, + userProject: 'storage-sdk-vendor', + }) + ) + .on('response', () => { + done( + new Error( + 'Upload succeeded when it should have failed due to checksum mismatch.' + ) + ); + }) + .on('finish', () => { + done( + new Error( + 'Upload finished successfully when it should have failed.' + ) + ); + }) + .on('error', (err: Error) => { + assert.ok( + err.message.includes(EXPECTED_ERROR_MESSAGE_PART), + `Expected error message part "${EXPECTED_ERROR_MESSAGE_PART}" not found in: ${err.message}` + ); + done(); + }); + }); + }); }); From 674b1d4e90c01600ed30ed20af95dc02d899c603 Mon Sep 17 00:00:00 2001 From: Thiyagu K Date: Tue, 9 Dec 2025 08:48:43 +0000 Subject: [PATCH 10/11] fix system test --- src/resumable-upload.ts | 15 ++++++------ system-test/kitchen.ts | 52 ++++++++++++++--------------------------- 2 files changed, 25 insertions(+), 42 deletions(-) diff --git a/src/resumable-upload.ts b/src/resumable-upload.ts index 222dc959d..67ce8d885 100644 --- a/src/resumable-upload.ts +++ b/src/resumable-upload.ts @@ -1063,15 +1063,16 @@ export class Upload extends Writable { } else { headers['Content-Range'] = `bytes ${this.offset}-*/${this.contentLength}`; - // In single chunk mode, if contentLength is set, the entire upload is the final chunk. - const isSingleFinalUpload = typeof this.contentLength === 'number'; + // Buffer the entire upload to calculate the hash before sending. + for await (const chunk of this.upstreamIterator(expectedUploadSize)) { + this.#addLocalBufferCache(chunk); + } + this.prependLocalBufferToUpstream(); - if (isSingleFinalUpload) { - if (this.#hashValidator) { - this.#hashValidator.end(); - } - this.#applyChecksumHeaders(headers); + if (this.#hashValidator) { + this.#hashValidator.end(); } + this.#applyChecksumHeaders(headers); } const reqOpts: GaxiosOptions = { diff --git a/system-test/kitchen.ts b/system-test/kitchen.ts index bcdee3614..e63615f7b 100644 --- a/system-test/kitchen.ts +++ b/system-test/kitchen.ts @@ -35,6 +35,7 @@ import { RETRYABLE_ERR_FN_DEFAULT, Storage, } from '../src/storage.js'; +import {CRC32C} from '../src/crc32c.js'; const bucketName = process.env.BUCKET_NAME || 'gcs-resumable-upload-test'; @@ -306,38 +307,34 @@ describe('resumable-upload', () => { assert.equal(results.size, FILE_SIZE); }); - const INTEGRITY_FILE_SIZE = 1024 * 512; const KNOWN_CRC32C_OF_ZEROS = 'rthIWA=='; - describe('Validation of Client Checksums Against Server Response (Integration)', () => { - let integrityFilePath: string; - let integrityFileBuffer: Buffer; + describe('Validation of Client Checksums Against Server Response', () => { + let crc32c: string; before(async () => { - integrityFilePath = path.join(os.tmpdir(), '512KB_rand.dat'); - integrityFileBuffer = crypto.randomBytes(INTEGRITY_FILE_SIZE); - await fs.promises.writeFile(integrityFilePath, integrityFileBuffer); + crc32c = (await CRC32C.fromFile(filePath)).toString(); }); - - after(async () => { - await fs.promises.rm(integrityFilePath, {force: true}); - }); - it('should upload successfully when crc32c calculation is enabled', done => { let uploadSucceeded = false; - fs.createReadStream(integrityFilePath) + fs.createReadStream(filePath) .on('error', done) .pipe( upload({ 
bucket: bucketName, - file: integrityFilePath, + file: filePath, crc32c: true, + clientCrc32c: crc32c, retryOptions: retryOptions, - userProject: 'storage-sdk-vendor', }) ) .on('error', err => { - done(new Error(`Upload failed unexpectedly on success path: ${err}`)); + console.log(err); + done( + new Error( + `Upload failed unexpectedly on success path: ${err.message}` + ) + ); }) .on('response', resp => { uploadSucceeded = resp.status === 200; @@ -349,34 +346,19 @@ describe('resumable-upload', () => { }); it('should destroy the stream on a checksum mismatch (client-provided hash mismatch)', done => { - const EXPECTED_ERROR_MESSAGE_PART = 'checksum mismatch'; + const EXPECTED_ERROR_MESSAGE_PART = `Provided CRC32C "${KNOWN_CRC32C_OF_ZEROS}" doesn't match calculated CRC32C`; - fs.createReadStream(integrityFilePath) + fs.createReadStream(filePath) .on('error', done) .pipe( upload({ bucket: bucketName, - file: integrityFilePath, - // ⚠️ Force a mismatch by providing a known incorrect hash for the file content + file: filePath, clientCrc32c: KNOWN_CRC32C_OF_ZEROS, + crc32c: true, retryOptions: retryOptions, - userProject: 'storage-sdk-vendor', }) ) - .on('response', () => { - done( - new Error( - 'Upload succeeded when it should have failed due to checksum mismatch.' - ) - ); - }) - .on('finish', () => { - done( - new Error( - 'Upload finished successfully when it should have failed.' - ) - ); - }) .on('error', (err: Error) => { assert.ok( err.message.includes(EXPECTED_ERROR_MESSAGE_PART), From 7e631dc869326b32085c7cab5cb38935b1b6a7f2 Mon Sep 17 00:00:00 2001 From: Thiyagu K Date: Tue, 9 Dec 2025 10:04:34 +0000 Subject: [PATCH 11/11] bug fix --- src/resumable-upload.ts | 6 ------ 1 file changed, 6 deletions(-) diff --git a/src/resumable-upload.ts b/src/resumable-upload.ts index 67ce8d885..507e3c1f8 100644 --- a/src/resumable-upload.ts +++ b/src/resumable-upload.ts @@ -1063,12 +1063,6 @@ export class Upload extends Writable { } else { headers['Content-Range'] = `bytes ${this.offset}-*/${this.contentLength}`; - // Buffer the entire upload to calculate the hash before sending. - for await (const chunk of this.upstreamIterator(expectedUploadSize)) { - this.#addLocalBufferCache(chunk); - } - this.prependLocalBufferToUpstream(); - if (this.#hashValidator) { this.#hashValidator.end(); }
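
A minimal, self-contained usage sketch of the checksum options this series introduces, modeled on the system test added in PATCH 09/10. The bucket name, object name, local file path, import path, and the retryOptions literal below are assumptions for illustration only; the upload() export, the crc32c/md5/clientCrc32c/clientMd5Hash config fields, the X-Goog-Hash header on the final chunk, and the UPLOAD_MISMATCH error surfaced via 'error' are taken from the diffs above.

import * as fs from 'fs';
import {upload} from './src/resumable-upload.js'; // path assumed; adjust to your build layout

// Assumed minimal retry settings; any valid RetryOptions object works here.
const retryOptions = {autoRetry: false, maxRetries: 0};

fs.createReadStream('/tmp/data.bin') // assumed local file
  .pipe(
    upload({
      bucket: 'my-bucket', // assumed bucket name
      file: 'data.bin',    // assumed object name
      crc32c: true,        // client-side CRC32C calculation; sent via X-Goog-Hash on the final chunk
      // clientCrc32c: '<base64 CRC32C>', // alternatively, supply a pre-calculated hash instead
      // md5: true,                       // MD5 can be enabled the same way
      retryOptions,
    })
  )
  .on('error', err => {
    // On a mismatch the stream is destroyed with a RequestError whose message is
    // FileExceptionMessages.UPLOAD_MISMATCH and whose errors[0] names the failing checksum.
    console.error('upload failed:', err.message);
  })
  .on('finish', () => {
    console.log('upload completed and checksum verified');
  });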