diff --git a/.github/actions/javascript/getPullRequestIncrementalChanges/index.js b/.github/actions/javascript/getPullRequestIncrementalChanges/index.js index 5cce6f0b14860..5ec39fec1476a 100644 --- a/.github/actions/javascript/getPullRequestIncrementalChanges/index.js +++ b/.github/actions/javascript/getPullRequestIncrementalChanges/index.js @@ -12746,6 +12746,24 @@ class Git { hasChanges: files.length > 0, }; } + /** + * Check if a file from a Git diff is added. + * + * @param file - The file to check + * @returns true if the file is added, false otherwise + */ + static isAddedDiffFile(file) { + const hasAddedLines = file.addedLines.size > 0; + const hasModifiedLines = file.modifiedLines.size > 0; + const hasRemovedLines = file.removedLines.size > 0; + if (!hasAddedLines || hasModifiedLines || hasRemovedLines) { + return false; + } + if (file.hunks.length === 1 && file.hunks.at(0)?.oldStart === 0 && file.hunks.at(0)?.oldCount === 0) { + return true; + } + return false; + } /** * Calculate the line number for a diff line based on the hunk and line type. */ diff --git a/.github/workflows/react-compiler-compliance.yml b/.github/workflows/react-compiler-compliance.yml index 7dfb463240f40..2b1796f0f17a7 100644 --- a/.github/workflows/react-compiler-compliance.yml +++ b/.github/workflows/react-compiler-compliance.yml @@ -28,7 +28,7 @@ jobs: # In phase 0 of the React Compiler compliance check rollout, # we want to report failures but don't fail the check. 
# See https://github.com/Expensify/App/issues/68765#issuecomment-3487317881 - run: npm run react-compiler-compliance-check check-changed -- --filterByDiff || true + run: npm run react-compiler-compliance-check check-changed -- --filterByDiff --enforceNewComponents env: CI: true GITHUB_TOKEN: ${{ github.token }} diff --git a/scripts/react-compiler-compliance-check.ts b/scripts/react-compiler-compliance-check.ts index b58ea40848c12..3e206c9190bfb 100644 --- a/scripts/react-compiler-compliance-check.ts +++ b/scripts/react-compiler-compliance-check.ts @@ -7,11 +7,13 @@ * It provides both CI and local development tools to enforce Rules of React compliance. */ import {execSync} from 'child_process'; -import {writeFileSync} from 'fs'; -import {join} from 'path'; +import fs, {readFileSync} from 'fs'; +import path from 'path'; import type {TupleToUnion} from 'type-fest'; import CLI from './utils/CLI'; +import FileUtils from './utils/FileUtils'; import Git from './utils/Git'; +import type {DiffResult} from './utils/Git'; import {log, bold as logBold, error as logError, info as logInfo, note as logNote, success as logSuccess, warn as logWarn} from './utils/Logger'; const TAB = ' '; @@ -24,6 +26,19 @@ const SUPPRESSED_COMPILER_ERRORS = [ '(BuildHIR::lowerExpression) Expected Identifier, got MemberExpression key in ObjectExpression', ] as const satisfies string[]; +const MANUAL_MEMOIZATION_PATTERNS = { + memo: /\b(?:React\.)?memo\s*\(/g, + useMemo: /\b(?:React\.)?useMemo\s*\(/g, + useCallback: /\b(?:React\.)?useCallback\s*\(/g, +} as const satisfies Record; + +type ManualMemoizationKeyword = keyof typeof MANUAL_MEMOIZATION_PATTERNS; + +const MANUAL_MEMOIZATION_FAILURE_MESSAGE = (manualMemoizationKeyword: ManualMemoizationKeyword) => + `Found a manual memoization usage of \`${manualMemoizationKeyword}\`. Newly added React component files must not contain any manual memoization and instead be auto-memoized by React Compiler. 
Remove \`${manualMemoizationKeyword}\` or disable automatic memoization by adding the \`"use no memo";\` directive at the beginning of the component and give a reason why automatic memoization is not applicable.`; + +const NO_MANUAL_MEMO_DIRECTIVE_PATTERN = /["']use no memo["']\s*;?/; + const ESLINT_DISABLE_PATTERNS = { FILE_KEYWORDS: ['// eslint-disable ', '/* eslint-disable '], LINE_KEYWORDS: ['// eslint-disable-next-line ', '/* eslint-disable-next-line '], @@ -37,14 +52,15 @@ const VERBOSE_OUTPUT_LINE_REGEXES = { REASON: /Reason: (.+)/, } as const satisfies Record; -type FailureMap = Map; - type CompilerResults = { success: Set; failures: FailureMap; suppressedFailures: FailureMap; + enforcedAddedComponentFailures?: EnforcedAddedComponentFailureMap; }; +type FailureMap = Map; + type CompilerFailure = { file: string; line?: number; @@ -52,6 +68,19 @@ type CompilerFailure = { reason?: string; }; +type EnforcedAddedComponentFailureMap = Map; + +type ManualMemoFailure = { + manualMemoizationMatches: ManualMemoizationMatch[]; + compilerFailures: FailureMap | undefined; +}; + +type ManualMemoizationMatch = { + keyword: ManualMemoizationKeyword; + line: number; + column: number; +}; + type DiffFilteringCommits = { fromRef: string; toRef?: string; @@ -67,6 +96,7 @@ type BaseCheckOptions = PrintResultsOptions & { reportFileName?: string; shouldGenerateReport?: boolean; shouldFilterByDiff?: boolean; + shouldEnforceNewComponents?: boolean; }; type CheckOptions = BaseCheckOptions & { @@ -81,6 +111,7 @@ async function check({ remote, shouldPrintSuccesses = false, shouldPrintSuppressedErrors = false, + shouldEnforceNewComponents = false, }: CheckOptions): Promise { if (files) { logInfo(`Running React Compiler check for ${files.length} files or glob patterns...`); @@ -91,11 +122,21 @@ async function check({ const src = createFilesGlob(files); let results = runCompilerHealthcheck(src); - if (shouldFilterByDiff) { + if (shouldFilterByDiff || shouldEnforceNewComponents) { const 
mainBaseCommitHash = await Git.getMainBranchCommitHash(remote); const diffFilteringCommits: DiffFilteringCommits = {fromRef: mainBaseCommitHash}; + const diffResult = Git.diff(diffFilteringCommits.fromRef, diffFilteringCommits.toRef); - results = await filterResultsByDiff(results, diffFilteringCommits, {shouldPrintSuccesses, shouldPrintSuppressedErrors}); + if (shouldFilterByDiff) { + results = await filterResultsByDiff(results, diffFilteringCommits, diffResult, {shouldPrintSuccesses, shouldPrintSuppressedErrors}); + } + + if (shouldEnforceNewComponents) { + const {nonAutoMemoEnforcedFailures, addedComponentFailures} = enforceNewComponentGuard(results, diffResult); + + results.enforcedAddedComponentFailures = addedComponentFailures; + results.failures = nonAutoMemoEnforcedFailures; + } } printResults(results, {shouldPrintSuccesses, shouldPrintSuppressedErrors}); @@ -104,7 +145,7 @@ async function check({ generateReport(results, reportFileName); } - const isPassed = results.failures.size === 0; + const isPassed = results.failures.size === 0 && (results.enforcedAddedComponentFailures?.size ?? 0) === 0; return isPassed; } @@ -158,6 +199,11 @@ function addFailureIfDoesNotExist(failureMap: FailureMap, newFailure: CompilerFa return true; } +/** + * Parses the output of the react-compiler-healthcheck command and returns the compiler results. + * @param output - The output of the react-compiler-healthcheck command + * @returns The compiler results + */ function parseHealthcheckOutput(output: string): CompilerResults { const lines = output.split('\n'); @@ -244,6 +290,11 @@ function parseHealthcheckOutput(output: string): CompilerResults { return results; } +/** + * Checks if a compiler error should be suppressed based on the error reason. 
+ * @param reason - The reason for the compiler error + * @returns True if the error should be suppressed, false otherwise + */ function shouldSuppressCompilerError(reason: string | undefined): boolean { if (!reason) { return false; @@ -253,6 +304,11 @@ function shouldSuppressCompilerError(reason: string | undefined): boolean { return SUPPRESSED_COMPILER_ERRORS.some((suppressedError) => reason.includes(suppressedError)); } +/** + * Creates a unique key for a compiler failure by combining the file path, line number, and column number. + * @param failure - The compiler failure to create a unique key for + * @returns A unique key for the compiler failure + */ function getUniqueFileKey({file, line, column}: CompilerFailure): string { const isLineSet = line !== undefined; const isLineAndColumnSet = isLineSet && column !== undefined; @@ -260,6 +316,11 @@ function getUniqueFileKey({file, line, column}: CompilerFailure): string { return file + (isLineSet ? `:${line}` : '') + (isLineAndColumnSet ? `:${column}` : ''); } +/** + * Creates a glob pattern from an array of file paths. + * @param files - The file paths to create a glob pattern from + * @returns A glob pattern string + */ function createFilesGlob(files?: string[]): string | undefined { if (!files || files.length === 0) { return undefined; @@ -287,13 +348,11 @@ function createFilesGlob(files?: string[]): string | undefined { async function filterResultsByDiff( results: CompilerResults, diffFilteringCommits: DiffFilteringCommits, + diffResult: DiffResult, {shouldPrintSuccesses, shouldPrintSuppressedErrors}: PrintResultsOptions, ): Promise { logInfo(`Filtering results by diff between ${diffFilteringCommits.fromRef} and ${diffFilteringCommits.toRef ?? 
'the working tree'}...`); - // Get the diff between the base ref and the working tree - const diffResult = Git.diff(diffFilteringCommits.fromRef, diffFilteringCommits.toRef); - // If there are no changes, return empty results if (!diffResult.hasChanges) { return { @@ -354,12 +413,16 @@ async function filterResultsByDiff( } } - // Filter failures to only include those on changed lines and files/chunks for which an eslint-disable comment is was removed + /** + * Filter failures to only include those on changed lines and files/chunks for which an eslint-disable comment was removed + * @param failures - The unfiltered compiler failures + * @returns The filtered compiler failures + */ function filterFailuresByChangedLines(failures: Map) { // Filter failures to only include those on changed lines const filteredFailures = new Map(); - for (const [key, failure] of failures.entries()) { + for (const [key, failure] of failures) { const changedLines = changedLinesMap.get(failure.file); // If the file is not in the diff, skip this failure @@ -434,7 +497,132 @@ async function filterResultsByDiff( }; } -function printResults({success, failures, suppressedFailures}: CompilerResults, {shouldPrintSuccesses, shouldPrintSuppressedErrors}: PrintResultsOptions): void { +/** + * Enforces the new component guard by checking for manual memoization keywords in added files and attaching React compiler failures. 
+ * @param failures - The compiler results to enforce the new component guard on + * @param diffResult - The diff result to check for added files + * @returns The enforced compiler results + */ +function enforceNewComponentGuard({failures}: CompilerResults, diffResult: DiffResult) { + const addedDiffFiles = new Set(); + for (const file of diffResult.files) { + if (Git.isAddedDiffFile(file)) { + addedDiffFiles.add(file.filePath); + } + } + + // Partition failures into non-auto memo enforced failures and added file failures + const nonAutoMemoEnforcedFailures: FailureMap = new Map(); + const addedFileFailures = new Map(); + for (const [failureKey, failure] of failures) { + const addedFilePath = failure.file; + + if (!addedDiffFiles.has(addedFilePath)) { + nonAutoMemoEnforcedFailures.set(failureKey, failure); + continue; + } + + const existingAddedFileFailuresMap = addedFileFailures.get(addedFilePath); + if (existingAddedFileFailuresMap) { + existingAddedFileFailuresMap.set(failureKey, failure); + continue; + } + + addedFileFailures.set(addedFilePath, new Map([[failureKey, failure]])); + } + + // Used as fallback to add back the failures from added files that didn't have manual memoization + function addNonAutoMemoEnforcedFailures(addedFilePath: string): void { + const addedFileFailuresMap = addedFileFailures.get(addedFilePath); + + if (!addedFileFailuresMap) { + return; + } + + for (const [failureKey, failure] of addedFileFailuresMap) { + nonAutoMemoEnforcedFailures.set(failureKey, failure); + } + } + + // Check all added files for manual memoization keywords and attach React compiler failures. + // If no manual memoization keywords are found add the failures back to the regular failures. 
+ const addedComponentFailures: EnforcedAddedComponentFailureMap = new Map(); + for (const addedFilePath of addedDiffFiles) { + let source: string | null = null; + try { + const absolutePath = path.join(process.cwd(), addedFilePath); + source = readFileSync(absolutePath, 'utf8'); + } catch (error) { + logWarn(`Unable to read ${addedFilePath} while enforcing new component rules.`, error); + } + + if (!source || NO_MANUAL_MEMO_DIRECTIVE_PATTERN.test(source)) { + addNonAutoMemoEnforcedFailures(addedFilePath); + continue; + } + + const manualMemoizationMatches = findManualMemoizationMatches(source); + + if (manualMemoizationMatches.length === 0) { + addNonAutoMemoEnforcedFailures(addedFilePath); + continue; + } + + const manualMemoFailure: ManualMemoFailure = { + manualMemoizationMatches, + compilerFailures: addedFileFailures.get(addedFilePath), + }; + addedComponentFailures.set(addedFilePath, manualMemoFailure); + } + + return { + nonAutoMemoEnforcedFailures, + addedComponentFailures, + }; +} + +/** + * Finds all manual memoization keywords matches in source file and returns their line and column numbers. 
+ * @param source - The source code to search for manual memoization matches + * @returns An array of manual memoization matches + */ +function findManualMemoizationMatches(source: string): ManualMemoizationMatch[] { + const matches: ManualMemoizationMatch[] = []; + + for (const keyword of Object.keys(MANUAL_MEMOIZATION_PATTERNS) as ManualMemoizationKeyword[]) { + const regex = MANUAL_MEMOIZATION_PATTERNS[keyword]; + const regexMatches = source.matchAll(regex); + + for (const regexMatch of regexMatches) { + const matchIndex = regexMatch.index; + if (matchIndex === undefined) { + continue; + } + const {line, column} = FileUtils.getLineAndColumnFromIndex(source, matchIndex); + matches.push({keyword, line, column}); + } + } + + // Sort matches by line number first, then by column number + matches.sort((a, b) => { + if (a.line !== b.line) { + return a.line - b.line; + } + return a.column - b.column; + }); + + return matches; +} + +/** + * Prints the results of the React Compiler compliance check. 
+ * @param results - The compiler results to print + * @param options - The options for printing the results + */ +function printResults( + {success, failures, suppressedFailures, enforcedAddedComponentFailures}: CompilerResults, + {shouldPrintSuccesses, shouldPrintSuppressedErrors}: PrintResultsOptions, +): void { if (shouldPrintSuccesses && success.size > 0) { log(); logSuccess(`Successfully compiled ${success.size} files with React Compiler:`); @@ -466,7 +654,7 @@ function printResults({success, failures, suppressedFailures}: CompilerResults, logWarn(`Suppressed the following errors in these files:`); log(); - for (const [error, suppressedErrorFiles] of suppressedErrorMap.entries()) { + for (const [error, suppressedErrorFiles] of suppressedErrorMap) { logBold(error); const filesLine = suppressedErrorFiles.map((failure) => getUniqueFileKey(failure)).join(', '); logNote(`${TAB} - ${filesLine}`); @@ -475,38 +663,71 @@ function printResults({success, failures, suppressedFailures}: CompilerResults, log(); } - const isPassed = failures.size === 0; + const hasEnforcedAddedComponentFailures = enforcedAddedComponentFailures && enforcedAddedComponentFailures.size > 0; + + const isPassed = failures.size === 0 && !hasEnforcedAddedComponentFailures; if (isPassed) { logSuccess('All files pass React Compiler compliance check!'); return; } const distinctFileNames = new Set(); - for (const [, failure] of failures) { + for (const failure of failures.values()) { distinctFileNames.add(failure.file); } - log(); - logError(`Failed to compile ${distinctFileNames.size} files with React Compiler:`); - log(); + if (distinctFileNames.size > 0) { + log(); + logError(`Failed to compile ${distinctFileNames.size} files with React Compiler:`); + log(); - for (const [, failure] of failures) { - const location = failure.line && failure.column ? `:${failure.line}:${failure.column}` : ''; - logBold(`${failure.file}${location}`); - logNote(`${TAB}${failure.reason ?? 
'No reason provided'}`); + printFailures(failures); + } + + if (hasEnforcedAddedComponentFailures) { + log(); + logError(`The following newly added components should rely on React Compiler’s automatic memoization (manual memoization is not allowed):`); + log(); + + for (const [filePath, {manualMemoizationMatches, compilerFailures}] of enforcedAddedComponentFailures) { + for (const manualMemoizationMatch of manualMemoizationMatches) { + const location = manualMemoizationMatch.line && manualMemoizationMatch.column ? `:${manualMemoizationMatch.line}:${manualMemoizationMatch.column}` : ''; + logBold(`${filePath}${location}`); + logNote(`${TAB}${MANUAL_MEMOIZATION_FAILURE_MESSAGE(manualMemoizationMatch.keyword)}`); + } + + if (compilerFailures) { + log(); + logBold(`${TAB}Additional React Compiler errors:`); + printFailures(compilerFailures, 1); + } + } } log(); - logError('The files above failed to compile with React Compiler, probably because of Rules of React violations. Please fix the issues and run the check again.'); + logError('The files above failed the React Compiler compliance check. Please fix the issues and run the check again...'); } +function printFailures(failuresToPrint: FailureMap, level = 0) { + for (const failure of failuresToPrint.values()) { + const location = failure.line && failure.column ? `:${failure.line}:${failure.column}` : ''; + logBold(`${TAB.repeat(level)}${failure.file}${location}`); + logNote(`${TAB.repeat(level + 1)}${failure.reason ?? 'No reason provided'}`); + } +} + +/** + * Generates a report of the React Compiler compliance check. 
+ * @param results - The compiler results to generate a report for + * @param outputFileName - The file name to save the report to + */ function generateReport(results: CompilerResults, outputFileName = DEFAULT_REPORT_FILENAME): void { log('\n'); logInfo('Creating React Compiler Compliance Check report:'); // Save detailed report - const reportFile = join(process.cwd(), outputFileName); - writeFileSync( + const reportFile = path.join(process.cwd(), outputFileName); + fs.writeFileSync( reportFile, JSON.stringify( { @@ -591,14 +812,32 @@ async function main() { required: false, default: false, }, + enforceNewComponents: { + description: 'Ensure new components compile with React Compiler and avoid manual memoization', + required: false, + default: false, + }, }, }); const {command, file} = cli.positionalArgs; const {remote, reportFileName} = cli.namedArgs; - const {report: shouldGenerateReport, filterByDiff: shouldFilterByDiff, printSuccesses: shouldPrintSuccesses, printSuppressedErrors: shouldPrintSuppressedErrors} = cli.flags; - - const commonOptions: BaseCheckOptions = {shouldGenerateReport, reportFileName, shouldFilterByDiff, shouldPrintSuccesses, shouldPrintSuppressedErrors}; + const { + report: shouldGenerateReport, + filterByDiff: shouldFilterByDiff, + printSuccesses: shouldPrintSuccesses, + printSuppressedErrors: shouldPrintSuppressedErrors, + enforceNewComponents: shouldEnforceNewComponents, + } = cli.flags; + + const commonOptions: BaseCheckOptions = { + shouldGenerateReport, + reportFileName, + shouldFilterByDiff, + shouldPrintSuccesses, + shouldPrintSuppressedErrors, + shouldEnforceNewComponents, + }; async function runCommand() { switch (command) { diff --git a/scripts/utils/FileUtils.ts b/scripts/utils/FileUtils.ts new file mode 100644 index 0000000000000..83e227c041da2 --- /dev/null +++ b/scripts/utils/FileUtils.ts @@ -0,0 +1,37 @@ +const ERROR_MESSAGES = { + SOURCE_CANNOT_BE_EMPTY: 'Source cannot be empty', + INDEX_CANNOT_BE_NEGATIVE: 'Index cannot be 
negative', + // eslint-disable-next-line @typescript-eslint/naming-convention + INDEX_OUT_OF_BOUNDS: (sourceLength: number, index: number) => `Index ${index} is out of bounds for source length ${sourceLength}`, +} as const; + +const FileUtils = { + /** + * Get the line and column from an index in a source string. + * @param source - The source string. + * @param index - The index in the source string. + * @returns The line and column. + */ + getLineAndColumnFromIndex: (source: string, index: number): {line: number; column: number} => { + if (source.length === 0) { + throw new Error(ERROR_MESSAGES.SOURCE_CANNOT_BE_EMPTY); + } + + if (index < 0) { + throw new Error(ERROR_MESSAGES.INDEX_CANNOT_BE_NEGATIVE); + } + + if (index > source.length) { + throw new Error(ERROR_MESSAGES.INDEX_OUT_OF_BOUNDS(source.length, index)); + } + + const substring = source.slice(0, index); + const line = substring.split('\n').length; + const lastLineBreakIndex = substring.lastIndexOf('\n'); + const column = lastLineBreakIndex === -1 ? index + 1 : index - lastLineBreakIndex; + return {line, column}; + }, +}; + +export default FileUtils; +export {ERROR_MESSAGES}; diff --git a/scripts/utils/Git.ts b/scripts/utils/Git.ts index 4b5ed7a2d9b9b..bdbe22918c464 100644 --- a/scripts/utils/Git.ts +++ b/scripts/utils/Git.ts @@ -278,6 +278,28 @@ class Git { }; } + /** + * Check if a file from a Git diff is added. 
+ * + * @param file - The file to check + * @returns true if the file is added, false otherwise + */ + static isAddedDiffFile(file: FileDiff): boolean { + const hasAddedLines = file.addedLines.size > 0; + const hasModifiedLines = file.modifiedLines.size > 0; + const hasRemovedLines = file.removedLines.size > 0; + + if (!hasAddedLines || hasModifiedLines || hasRemovedLines) { + return false; + } + + if (file.hunks.length === 1 && file.hunks.at(0)?.oldStart === 0 && file.hunks.at(0)?.oldCount === 0) { + return true; + } + + return false; + } + /** * Calculate the line number for a diff line based on the hunk and line type. */ diff --git a/tests/unit/GitTest.ts b/tests/unit/GitTest.ts index 88610a12d1350..14e198d103f6f 100644 --- a/tests/unit/GitTest.ts +++ b/tests/unit/GitTest.ts @@ -509,4 +509,268 @@ describe('Git', () => { expect(file.removedLines.size).toBe(0); }); }); + + describe('isAddedDiffFile', () => { + it('returns true for newly added files with single hunk starting at line 0', () => { + const mockDiffOutput = dedent(` + diff --git a/new-file.ts b/new-file.ts + new file mode 100644 + index 0000000..1234567 + --- /dev/null + +++ b/new-file.ts + @@ -0,0 +1,3 @@ + +const hello = 'world'; + +const foo = 'bar'; + +const baz = 'qux'; + `); + + mockExecSync.mockReturnValue(mockDiffOutput); + + const result = Git.diff('main'); + const file = result.files.at(0); + expect(file).toBeDefined(); + if (!file) { + return; + } + + expect(Git.isAddedDiffFile(file)).toBe(true); + }); + + it('returns true for newly added files with single line', () => { + const mockDiffOutput = dedent(` + diff --git a/single-line.ts b/single-line.ts + new file mode 100644 + index 0000000..1234567 + --- /dev/null + +++ b/single-line.ts + @@ -0,0 +1,1 @@ + +export const test = 'value'; + `); + + mockExecSync.mockReturnValue(mockDiffOutput); + + const result = Git.diff('main'); + const file = result.files.at(0); + expect(file).toBeDefined(); + if (!file) { + return; + } + + 
expect(Git.isAddedDiffFile(file)).toBe(true); + }); + + it('returns false for files with modified lines', () => { + const mockDiffOutput = dedent(` + diff --git a/modified.ts b/modified.ts + index 1234567..abcdefg 100644 + --- a/modified.ts + +++ b/modified.ts + @@ -1,1 +1,1 @@ + -const old = 'value'; + +const new = 'value'; + `); + + mockExecSync.mockReturnValue(mockDiffOutput); + + const result = Git.diff('main'); + const file = result.files.at(0); + expect(file).toBeDefined(); + if (!file) { + return; + } + + expect(Git.isAddedDiffFile(file)).toBe(false); + }); + + it('returns false for files with removed lines', () => { + const mockDiffOutput = dedent(` + diff --git a/removed.ts b/removed.ts + index 1234567..abcdefg 100644 + --- a/removed.ts + +++ b/removed.ts + @@ -2,2 +2,0 @@ + -const removed1 = 'value1'; + -const removed2 = 'value2'; + `); + + mockExecSync.mockReturnValue(mockDiffOutput); + + const result = Git.diff('main'); + const file = result.files.at(0); + expect(file).toBeDefined(); + if (!file) { + return; + } + + expect(Git.isAddedDiffFile(file)).toBe(false); + }); + + it('returns false for files with both added and modified lines', () => { + const mockDiffOutput = dedent(` + diff --git a/mixed.ts b/mixed.ts + index 1234567..abcdefg 100644 + --- a/mixed.ts + +++ b/mixed.ts + @@ -1,1 +1,2 @@ + -const old = 'value'; + +const new = 'value'; + +const added = 'new'; + `); + + mockExecSync.mockReturnValue(mockDiffOutput); + + const result = Git.diff('main'); + const file = result.files.at(0); + expect(file).toBeDefined(); + if (!file) { + return; + } + + expect(Git.isAddedDiffFile(file)).toBe(false); + }); + + it('returns false for files with both added and removed lines', () => { + const mockDiffOutput = dedent(` + diff --git a/mixed.ts b/mixed.ts + index 1234567..abcdefg 100644 + --- a/mixed.ts + +++ b/mixed.ts + @@ -1,1 +1,1 @@ + -const removed = 'value'; + +const added = 'value'; + @@ -3,0 +4,1 @@ + +const newLine = 'new'; + `); + + 
mockExecSync.mockReturnValue(mockDiffOutput); + + const result = Git.diff('main'); + const file = result.files.at(0); + expect(file).toBeDefined(); + if (!file) { + return; + } + + expect(Git.isAddedDiffFile(file)).toBe(false); + }); + + it('returns false for files with multiple hunks', () => { + const mockDiffOutput = dedent(` + diff --git a/multi-hunk.ts b/multi-hunk.ts + new file mode 100644 + index 0000000..1234567 + --- /dev/null + +++ b/multi-hunk.ts + @@ -0,0 +1,2 @@ + +const first = 'value'; + +const second = 'value'; + @@ -0,0 +3,2 @@ + +const third = 'value'; + +const fourth = 'value'; + `); + + mockExecSync.mockReturnValue(mockDiffOutput); + + const result = Git.diff('main'); + const file = result.files.at(0); + expect(file).toBeDefined(); + if (!file) { + return; + } + + expect(Git.isAddedDiffFile(file)).toBe(false); + }); + + it('returns false for files with added lines but oldStart !== 0', () => { + const mockDiffOutput = dedent(` + diff --git a/inserted.ts b/inserted.ts + index 1234567..abcdefg 100644 + --- a/inserted.ts + +++ b/inserted.ts + @@ -5,0 +6,2 @@ + +const inserted1 = 'value1'; + +const inserted2 = 'value2'; + `); + + mockExecSync.mockReturnValue(mockDiffOutput); + + const result = Git.diff('main'); + const file = result.files.at(0); + expect(file).toBeDefined(); + if (!file) { + return; + } + + expect(Git.isAddedDiffFile(file)).toBe(false); + }); + + it('returns false for files with added lines but oldCount !== 0', () => { + const mockDiffOutput = dedent(` + diff --git a/partial.ts b/partial.ts + index 1234567..abcdefg 100644 + --- a/partial.ts + +++ b/partial.ts + @@ -1,1 +1,2 @@ + const existing = 'value'; + +const added = 'new'; + `); + + mockExecSync.mockReturnValue(mockDiffOutput); + + const result = Git.diff('main'); + const file = result.files.at(0); + expect(file).toBeDefined(); + if (!file) { + return; + } + + expect(Git.isAddedDiffFile(file)).toBe(false); + }); + + it('returns false for files with no added lines', () => { + 
const mockDiffOutput = dedent(` + diff --git a/no-additions.ts b/no-additions.ts + index 1234567..abcdefg 100644 + --- a/no-additions.ts + +++ b/no-additions.ts + @@ -1,2 +1,0 @@ + -const removed1 = 'value1'; + -const removed2 = 'value2'; + `); + + mockExecSync.mockReturnValue(mockDiffOutput); + + const result = Git.diff('main'); + const file = result.files.at(0); + expect(file).toBeDefined(); + if (!file) { + return; + } + + expect(Git.isAddedDiffFile(file)).toBe(false); + }); + + it('returns false for files with only modified lines and no additions', () => { + const mockDiffOutput = dedent(` + diff --git a/modified-only.ts b/modified-only.ts + index 1234567..abcdefg 100644 + --- a/modified-only.ts + +++ b/modified-only.ts + @@ -1,1 +1,1 @@ + -const old = 'old'; + +const new = 'new'; + `); + + mockExecSync.mockReturnValue(mockDiffOutput); + + const result = Git.diff('main'); + const file = result.files.at(0); + expect(file).toBeDefined(); + if (!file) { + return; + } + + expect(Git.isAddedDiffFile(file)).toBe(false); + }); + }); }); diff --git a/tests/unit/ScriptFileUtilsTest.ts b/tests/unit/ScriptFileUtilsTest.ts new file mode 100644 index 0000000000000..847a3d65626ca --- /dev/null +++ b/tests/unit/ScriptFileUtilsTest.ts @@ -0,0 +1,35 @@ +import FileUtils, {ERROR_MESSAGES} from '@scripts/utils/FileUtils'; + +const multiLineSource = `abc +hello +world +test +multi +line +content`; + +describe('FileUtils (Scripts)', () => { + describe('getLineAndColumnFromIndex', () => { + it('should return correct line and column for multi-line source', () => { + expect(FileUtils.getLineAndColumnFromIndex(multiLineSource, 0)).toStrictEqual({line: 1, column: 1}); + expect(FileUtils.getLineAndColumnFromIndex(multiLineSource, 3)).toStrictEqual({line: 1, column: 4}); + expect(FileUtils.getLineAndColumnFromIndex(multiLineSource, 4)).toStrictEqual({line: 2, column: 1}); + expect(FileUtils.getLineAndColumnFromIndex(multiLineSource, 10)).toStrictEqual({line: 3, column: 1}); + 
expect(FileUtils.getLineAndColumnFromIndex(multiLineSource, multiLineSource.length)).toStrictEqual({line: 7, column: 8}); + }); + + it('should throw an error if source is empty', () => { + expect(() => FileUtils.getLineAndColumnFromIndex('', 0)).toThrow(ERROR_MESSAGES.SOURCE_CANNOT_BE_EMPTY); + }); + + it('should throw an error if index is negative', () => { + expect(() => FileUtils.getLineAndColumnFromIndex(multiLineSource, -1)).toThrow(ERROR_MESSAGES.INDEX_CANNOT_BE_NEGATIVE); + }); + + it('should throw an error if index is out of bounds', () => { + expect(() => FileUtils.getLineAndColumnFromIndex(multiLineSource, multiLineSource.length + 10)).toThrow( + ERROR_MESSAGES.INDEX_OUT_OF_BOUNDS(multiLineSource.length, multiLineSource.length + 10), + ); + }); + }); +});