diff --git a/.changeset/slick-lamps-press.md b/.changeset/slick-lamps-press.md new file mode 100644 index 0000000000..837dc3dffc --- /dev/null +++ b/.changeset/slick-lamps-press.md @@ -0,0 +1,7 @@ +--- +"@workflow/builders": patch +"@workflow/core": patch +"@workflow/next": patch +--- + +Add lazy workflow/step discovery via deferredEntries in next diff --git a/packages/builders/src/base-builder.ts b/packages/builders/src/base-builder.ts index 429da67684..e676b448cc 100644 --- a/packages/builders/src/base-builder.ts +++ b/packages/builders/src/base-builder.ts @@ -1,5 +1,5 @@ import { randomUUID } from 'node:crypto'; -import { mkdir, readFile, rename, writeFile } from 'node:fs/promises'; +import { mkdir, readFile, realpath, rename, writeFile } from 'node:fs/promises'; import { basename, dirname, join, relative, resolve } from 'node:path'; import { promisify } from 'node:util'; import { pluralize } from '@workflow/utils'; @@ -25,6 +25,12 @@ const enhancedResolve = promisify(enhancedResolveOriginal); const EMIT_SOURCEMAPS_FOR_DEBUGGING = process.env.WORKFLOW_EMIT_SOURCEMAPS_FOR_DEBUGGING === '1'; +export interface DiscoveredEntries { + discoveredSteps: string[]; + discoveredWorkflows: string[]; + discoveredSerdeFiles: string[]; +} + /** * Base class for workflow builders. Provides common build logic for transforming * workflow source files into deployable bundles using esbuild and SWC. 
@@ -100,11 +106,7 @@ export abstract class BaseBuilder { protected async discoverEntries( inputs: string[], outdir: string - ): Promise<{ - discoveredSteps: string[]; - discoveredWorkflows: string[]; - discoveredSerdeFiles: string[]; - }> { + ): Promise<DiscoveredEntries> { const previousResult = this.discoveredEntries.get(inputs); if (previousResult) { @@ -270,23 +272,26 @@ export abstract class BaseBuilder { outfile, externalizeNonSteps, tsconfigPath, + discoveredEntries, }: { tsconfigPath?: string; inputFiles: string[]; outfile: string; format?: 'cjs' | 'esm'; externalizeNonSteps?: boolean; + discoveredEntries?: DiscoveredEntries; }): Promise<{ context: esbuild.BuildContext | undefined; manifest: WorkflowManifest; }> { // These need to handle watching for dev to scan for // new entries and changes to existing ones - const { - discoveredSteps: stepFiles, - discoveredWorkflows: workflowFiles, - discoveredSerdeFiles: serdeFiles, - } = await this.discoverEntries(inputFiles, dirname(outfile)); + const discovered = + discoveredEntries ?? + (await this.discoverEntries(inputFiles, dirname(outfile))); + const stepFiles = [...discovered.discoveredSteps].sort(); + const workflowFiles = [...discovered.discoveredWorkflows].sort(); + const serdeFiles = [...discovered.discoveredSerdeFiles].sort(); // Include serde files that aren't already step files for cross-context class registration. // Classes need to be registered in the step bundle so they can be deserialized @@ -368,6 +373,31 @@ export abstract class BaseBuilder { export { stepEntrypoint as POST } from 'workflow/runtime';`; // Bundle with esbuild and our custom SWC plugin + const entriesToBundle = externalizeNonSteps + ? [ + ...stepFiles, + ...serdeFiles, + ...(resolvedBuiltInSteps ? [resolvedBuiltInSteps] : []), + ] + : undefined; + const normalizedEntriesToBundle = entriesToBundle + ? 
Array.from( + new Set( + ( + await Promise.all( + entriesToBundle.map(async (entryToBundle) => { + const resolvedEntry = await realpath(entryToBundle).catch( + () => undefined + ); + return resolvedEntry + ? [entryToBundle, resolvedEntry] + : [entryToBundle]; + }) + ) + ).flat() + ) + ) + : undefined; const esbuildCtx = await esbuild.context({ banner: { js: '// biome-ignore-all lint: generated file\n/* eslint-disable */\n', @@ -414,13 +444,7 @@ export abstract class BaseBuilder { createPseudoPackagePlugin(), createSwcPlugin({ mode: 'step', - entriesToBundle: externalizeNonSteps - ? [ - ...stepFiles, - ...serdeFiles, - ...(resolvedBuiltInSteps ? [resolvedBuiltInSteps] : []), - ] - : undefined, + entriesToBundle: normalizedEntriesToBundle, outdir: outfile ? dirname(outfile) : undefined, workflowManifest, }), @@ -495,21 +519,24 @@ export abstract class BaseBuilder { outfile, bundleFinalOutput = true, tsconfigPath, + discoveredEntries, }: { tsconfigPath?: string; inputFiles: string[]; outfile: string; format?: 'cjs' | 'esm'; bundleFinalOutput?: boolean; + discoveredEntries?: DiscoveredEntries; }): Promise<{ manifest: WorkflowManifest; interimBundleCtx?: esbuild.BuildContext; bundleFinal?: (interimBundleResult: string) => Promise; }> { - const { - discoveredWorkflows: workflowFiles, - discoveredSerdeFiles: serdeFiles, - } = await this.discoverEntries(inputFiles, dirname(outfile)); + const discovered = + discoveredEntries ?? + (await this.discoverEntries(inputFiles, dirname(outfile))); + const workflowFiles = [...discovered.discoveredWorkflows].sort(); + const serdeFiles = [...discovered.discoveredSerdeFiles].sort(); // Include serde files that aren't already workflow files for cross-context class registration. 
// Classes need to be registered in the workflow bundle so they can be deserialized diff --git a/packages/builders/src/index.ts b/packages/builders/src/index.ts index c97f883e38..d7cf23d21b 100644 --- a/packages/builders/src/index.ts +++ b/packages/builders/src/index.ts @@ -38,6 +38,7 @@ export { workflowSerdeImportPattern, workflowSerdeSymbolPattern, } from './transform-utils.js'; +export { resolveWorkflowAliasRelativePath } from './workflow-alias.js'; export type { AstroConfig, BuildTarget, diff --git a/packages/builders/src/swc-esbuild-plugin.ts b/packages/builders/src/swc-esbuild-plugin.ts index 61cb4d13a3..8c941ac637 100644 --- a/packages/builders/src/swc-esbuild-plugin.ts +++ b/packages/builders/src/swc-esbuild-plugin.ts @@ -11,6 +11,7 @@ import { jsTsRegex, parentHasChild, } from './discover-entries-esbuild-plugin.js'; +import { resolveWorkflowAliasRelativePath } from './workflow-alias.js'; export interface SwcPluginOptions { mode: 'step' | 'workflow' | 'client'; @@ -187,10 +188,16 @@ export function createSwcPlugin(options: SwcPluginOptions): Plugin { // Handle files discovered outside the working directory // These come back as ../path/to/file, but we want just path/to/file if (relativeFilepath.startsWith('../')) { - relativeFilepath = relativeFilepath - .split('/') - .filter((part) => part !== '..') - .join('/'); + const aliasedRelativePath = + await resolveWorkflowAliasRelativePath(args.path, workingDir); + if (aliasedRelativePath) { + relativeFilepath = aliasedRelativePath; + } else { + relativeFilepath = relativeFilepath + .split('/') + .filter((part) => part !== '..') + .join('/'); + } } } diff --git a/packages/builders/src/workflow-alias.test.ts b/packages/builders/src/workflow-alias.test.ts new file mode 100644 index 0000000000..f39d6e8e24 --- /dev/null +++ b/packages/builders/src/workflow-alias.test.ts @@ -0,0 +1,68 @@ +import { mkdtempSync, mkdirSync, rmSync, writeFileSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import { dirname, 
join } from 'node:path'; +import { afterEach, beforeEach, describe, expect, it } from 'vitest'; +import { + clearWorkflowAliasResolutionCache, + resolveWorkflowAliasRelativePath, +} from './workflow-alias.js'; + +function writeFile(path: string): void { + mkdirSync(dirname(path), { recursive: true }); + writeFileSync(path, "'use workflow';\n", 'utf-8'); +} + +describe('resolveWorkflowAliasRelativePath', () => { + let testRoot: string; + let workingDir: string; + + beforeEach(() => { + clearWorkflowAliasResolutionCache(); + testRoot = mkdtempSync(join(tmpdir(), 'workflow-alias-')); + workingDir = join(testRoot, 'app'); + mkdirSync(workingDir, { recursive: true }); + }); + + afterEach(() => { + clearWorkflowAliasResolutionCache(); + rmSync(testRoot, { recursive: true, force: true }); + }); + + it('maps files in workflows/ to workflows/* aliases', async () => { + const filePath = join(workingDir, 'workflows', 'foo.ts'); + writeFile(filePath); + + await expect( + resolveWorkflowAliasRelativePath(filePath, workingDir) + ).resolves.toBe('workflows/foo.ts'); + }); + + it('maps files in src/workflows/ to src/workflows/* aliases', async () => { + const filePath = join(workingDir, 'src', 'workflows', 'foo.ts'); + writeFile(filePath); + + await expect( + resolveWorkflowAliasRelativePath(filePath, workingDir) + ).resolves.toBe('src/workflows/foo.ts'); + }); + + it('returns undefined for files that are not under workflows paths', async () => { + const filePath = join(workingDir, 'lib', 'foo.ts'); + writeFile(filePath); + + await expect( + resolveWorkflowAliasRelativePath(filePath, workingDir) + ).resolves.toBeUndefined(); + }); + + it('returns undefined when basename matches but realpath differs', async () => { + const workflowFilePath = join(workingDir, 'workflows', 'foo.ts'); + const externalFilePath = join(testRoot, 'external', 'workflows', 'foo.ts'); + writeFile(workflowFilePath); + writeFile(externalFilePath); + + await expect( + 
resolveWorkflowAliasRelativePath(externalFilePath, workingDir) + ).resolves.toBeUndefined(); + }); +}); diff --git a/packages/builders/src/workflow-alias.ts b/packages/builders/src/workflow-alias.ts new file mode 100644 index 0000000000..30d2c3b727 --- /dev/null +++ b/packages/builders/src/workflow-alias.ts @@ -0,0 +1,64 @@ +import { access, realpath } from 'node:fs/promises'; +import { basename, resolve } from 'node:path'; + +const workflowAliasResolutionCache = new Map< + string, + Promise<string | undefined> +>(); + +export function clearWorkflowAliasResolutionCache(): void { + workflowAliasResolutionCache.clear(); +} + +export async function resolveWorkflowAliasRelativePath( + absoluteFilePath: string, + workingDir: string +): Promise<string | undefined> { + const normalizedAbsolutePath = absoluteFilePath.replace(/\\/g, '/'); + // Only workflow source files can map to app-level `workflows/*` aliases. + if (!normalizedAbsolutePath.includes('/workflows/')) { + return undefined; + } + + const cacheKey = `${workingDir}::${normalizedAbsolutePath}`; + const cached = workflowAliasResolutionCache.get(cacheKey); + if (cached) { + return cached; + } + + const resolutionPromise = (async () => { + const fileName = basename(absoluteFilePath); + const aliasDirs = ['workflows', 'src/workflows']; + const resolvedFilePath = await realpath(absoluteFilePath).catch( + () => undefined + ); + if (!resolvedFilePath) { + return undefined; + } + + const aliases = await Promise.all( + aliasDirs.map(async (aliasDir) => { + const candidatePath = resolve(workingDir, aliasDir, fileName); + try { + await access(candidatePath); + } catch { + return undefined; + } + const resolvedCandidatePath = await realpath(candidatePath).catch( + () => undefined + ); + if (!resolvedCandidatePath) { + return undefined; + } + return resolvedCandidatePath === resolvedFilePath + ? 
`${aliasDir}/${fileName}` + : undefined; + }) + ); + + return aliases.find((aliasPath): aliasPath is string => Boolean(aliasPath)); + })(); + + workflowAliasResolutionCache.set(cacheKey, resolutionPromise); + return resolutionPromise; +} diff --git a/packages/core/e2e/dev.test.ts b/packages/core/e2e/dev.test.ts index 824c807933..48fe1fcf80 100644 --- a/packages/core/e2e/dev.test.ts +++ b/packages/core/e2e/dev.test.ts @@ -1,6 +1,6 @@ import fs from 'fs/promises'; import path from 'path'; -import { afterEach, describe, expect, test } from 'vitest'; +import { afterEach, beforeAll, describe, expect, test } from 'vitest'; import { getWorkbenchAppPath } from './utils'; export interface DevTestConfig { @@ -35,6 +35,7 @@ export function createDevTests(config?: DevTestConfig) { } describe('dev e2e', () => { const appPath = getWorkbenchAppPath(); + const deploymentUrl = process.env.DEPLOYMENT_URL; const generatedStep = path.join(appPath, finalConfig.generatedStepPath); const generatedWorkflow = path.join( appPath, @@ -44,6 +45,28 @@ export function createDevTests(config?: DevTestConfig) { const workflowsDir = finalConfig.workflowsDir ?? 'workflows'; const restoreFiles: Array<{ path: string; content: string }> = []; + const fetchWithTimeout = (pathname: string) => { + if (!deploymentUrl) { + return Promise.resolve(); + } + + return fetch(new URL(pathname, deploymentUrl), { + signal: AbortSignal.timeout(5_000), + }); + }; + + const prewarm = async () => { + // Pre-warm the app with bounded requests so cleanup hooks cannot hang. 
+ await Promise.all([ + fetchWithTimeout('/').catch(() => {}), + fetchWithTimeout('/api/chat').catch(() => {}), + ]); + }; + + beforeAll(async () => { + await prewarm(); + }); + afterEach(async () => { await Promise.all( restoreFiles.map(async (item) => { @@ -54,6 +77,7 @@ export function createDevTests(config?: DevTestConfig) { } }) ); + await prewarm(); restoreFiles.length = 0; }); @@ -145,6 +169,7 @@ ${apiFileContent}` while (true) { try { + await fetchWithTimeout('/api/chat'); const workflowContent = await fs.readFile( generatedWorkflow, 'utf8' diff --git a/packages/core/src/private.ts b/packages/core/src/private.ts index ee9738aa5b..ee6f7afbc4 100644 --- a/packages/core/src/private.ts +++ b/packages/core/src/private.ts @@ -16,6 +16,42 @@ export type StepFunction< const registeredSteps = new Map(); +function getStepIdAliasCandidates(stepId: string): string[] { + const parts = stepId.split('//'); + if (parts.length !== 3 || parts[0] !== 'step') { + return []; + } + + const modulePath = parts[1]; + const fnName = parts[2]; + const modulePathAliases = new Set(); + + const addAlias = (aliasModulePath: string) => { + if (aliasModulePath !== modulePath) { + modulePathAliases.add(aliasModulePath); + } + }; + + if (modulePath.startsWith('./workflows/')) { + const workflowRelativePath = modulePath.slice('./'.length); + addAlias(`./example/${workflowRelativePath}`); + addAlias(`./src/${workflowRelativePath}`); + } else if (modulePath.startsWith('./example/workflows/')) { + const workflowRelativePath = modulePath.slice('./example/'.length); + addAlias(`./${workflowRelativePath}`); + addAlias(`./src/${workflowRelativePath}`); + } else if (modulePath.startsWith('./src/workflows/')) { + const workflowRelativePath = modulePath.slice('./src/'.length); + addAlias(`./${workflowRelativePath}`); + addAlias(`./example/${workflowRelativePath}`); + } + + return Array.from( + modulePathAliases, + (aliasModulePath) => `step//${aliasModulePath}//${fnName}` + ); +} + /** * Register a 
step function to be served in the server bundle. * Also sets the stepId property on the function for serialization support. @@ -29,7 +65,20 @@ export function registerStepFunction(stepId: string, stepFn: StepFunction) { * Find a registered step function by name */ export function getStepFunction(stepId: string): StepFunction | undefined { - return registeredSteps.get(stepId); + const directMatch = registeredSteps.get(stepId); + if (directMatch) { + return directMatch; + } + + // Support equivalent workflow path aliases in mixed symlink environments. + for (const aliasStepId of getStepIdAliasCandidates(stepId)) { + const aliasMatch = registeredSteps.get(aliasStepId); + if (aliasMatch) { + return aliasMatch; + } + } + + return undefined; } /** diff --git a/packages/core/src/serialization.test.ts b/packages/core/src/serialization.test.ts index 13db84908f..71b6fdd4a0 100644 --- a/packages/core/src/serialization.test.ts +++ b/packages/core/src/serialization.test.ts @@ -1719,6 +1719,60 @@ describe('step function serialization', () => { expect(hydrated[0]).toBe(stepFn); }); + it('should deserialize step function using workflows/example path aliases', () => { + const registeredStepId = 'step//./example/workflows/99_e2e//doubleNumber'; + const aliasedStepId = 'step//./workflows/99_e2e//doubleNumber'; + const stepFn = async () => 42; + + registerStepFunction(registeredStepId, stepFn); + + const fnWithStepId = async () => 42; + Object.defineProperty(fnWithStepId, 'stepId', { + value: aliasedStepId, + writable: false, + enumerable: false, + configurable: false, + }); + const dehydrated = dehydrateStepArguments([fnWithStepId], globalThis); + const ops: Promise[] = []; + const hydrated = hydrateStepArguments( + dehydrated, + ops, + mockRunId, + globalThis + ); + const result = hydrated[0]; + + expect(result).toBe(stepFn); + }); + + it('should deserialize step function using workflows/src path aliases', () => { + const registeredStepId = 
'step//./src/workflows/99_e2e//doubleFromSrc'; + const aliasedStepId = 'step//./workflows/99_e2e//doubleFromSrc'; + const stepFn = async () => 42; + + registerStepFunction(registeredStepId, stepFn); + + const fnWithStepId = async () => 42; + Object.defineProperty(fnWithStepId, 'stepId', { + value: aliasedStepId, + writable: false, + enumerable: false, + configurable: false, + }); + const dehydrated = dehydrateStepArguments([fnWithStepId], globalThis); + const ops: Promise[] = []; + const hydrated = hydrateStepArguments( + dehydrated, + ops, + mockRunId, + globalThis + ); + const result = hydrated[0]; + + expect(result).toBe(stepFn); + }); + it('should throw error when reviver cannot find registered step function', () => { // Create a function with a non-existent stepId const fnWithNonExistentStepId = async () => 42; diff --git a/packages/next/src/builder-deferred.ts b/packages/next/src/builder-deferred.ts new file mode 100644 index 0000000000..d9612ce42f --- /dev/null +++ b/packages/next/src/builder-deferred.ts @@ -0,0 +1,1056 @@ +import { createHash } from 'node:crypto'; +import { constants } from 'node:fs'; +import { access, mkdir, readFile, stat, writeFile } from 'node:fs/promises'; +import os from 'node:os'; +import { dirname, isAbsolute, join, resolve } from 'node:path'; +import { + createSocketServer, + type SocketIO, + type SocketServerConfig, +} from './socket-server.js'; + +const ROUTE_STUB_FILE_MARKER = 'WORKFLOW_ROUTE_STUB_FILE'; + +let CachedNextBuilderDeferred: any; + +// Create the deferred Next builder dynamically by extending the ESM BaseBuilder. +// Exported as getNextBuilderDeferred() to allow CommonJS modules to import from +// the ESM @workflow/builders package via dynamic import at runtime. 
+export async function getNextBuilderDeferred() { + if (CachedNextBuilderDeferred) { + return CachedNextBuilderDeferred; + } + + const { + BaseBuilder: BaseBuilderClass, + STEP_QUEUE_TRIGGER, + WORKFLOW_QUEUE_TRIGGER, + detectWorkflowPatterns, + isWorkflowSdkFile, + // biome-ignore lint/security/noGlobalEval: Need to use eval here to avoid TypeScript from transpiling the import statement into `require()` + } = (await eval( + 'import("@workflow/builders")' + )) as typeof import('@workflow/builders'); + + class NextDeferredBuilder extends BaseBuilderClass { + private socketIO?: SocketIO; + private readonly discoveredWorkflowFiles = new Set<string>(); + private readonly discoveredStepFiles = new Set<string>(); + private readonly discoveredSerdeFiles = new Set<string>(); + private trackedDependencyFiles = new Set<string>(); + private deferredBuildQueue = Promise.resolve(); + private cacheInitialized = false; + private cacheWriteTimer: NodeJS.Timeout | null = null; + private lastDeferredBuildSignature: string | null = null; + + async build() { + const outputDir = await this.findAppDirectory(); + + await this.initializeDiscoveryState(); + + await this.writeStubFiles(outputDir); + await this.createDiscoverySocketServer(); + } + + async onBeforeDeferredEntries(): Promise<void> { + await this.initializeDiscoveryState(); + await this.validateDiscoveredEntryFiles(); + const implicitStepFiles = await this.resolveImplicitStepFiles(); + + const inputFiles = Array.from( + new Set([ + ...this.discoveredWorkflowFiles, + ...this.discoveredStepFiles, + ...implicitStepFiles, + ]) + ).sort(); + const pendingBuild = this.deferredBuildQueue.then(() => + this.buildDeferredEntriesUntilStable(inputFiles, implicitStepFiles) + ); + + // Keep the queue chain alive even when the current build fails so future + // callbacks can enqueue another attempt without triggering unhandled + // rejection warnings. + this.deferredBuildQueue = pendingBuild.catch(() => { + // Error is surfaced through `pendingBuild` below. 
+ } + + await pendingBuild; + } + + private async buildDeferredEntriesUntilStable( + inputFiles: string[], + implicitStepFiles: string[] + ): Promise<void> { + // A successful build can discover additional transitive dependency files + // (via source maps), which changes the signature and may require one more + // build pass to include newly discovered serde files. + const maxBuildPasses = 3; + + for (let buildPass = 0; buildPass < maxBuildPasses; buildPass++) { + const buildSignature = + await this.createDeferredBuildSignature(inputFiles); + if (buildSignature === this.lastDeferredBuildSignature) { + return; + } + + let didBuildSucceed = false; + try { + await this.buildDiscoveredFiles(inputFiles, implicitStepFiles); + didBuildSucceed = true; + } catch (error) { + if (this.config.watch) { + console.warn( + '[workflow] Deferred entries build failed. Will retry only after inputs change.', + error + ); + } else { + throw error; + } + } finally { + // Record attempted signature even on failure so we don't loop on the + // same broken input graph. + this.lastDeferredBuildSignature = buildSignature; + } + + if (!didBuildSucceed) { + return; + } + + const postBuildSignature = + await this.createDeferredBuildSignature(inputFiles); + if (postBuildSignature === buildSignature) { + return; + } + } + + console.warn( + '[workflow] Deferred entries build signature did not stabilize after 3 passes.' 
); + } + + private async resolveImplicitStepFiles(): Promise<string[]> { + let workflowCjsEntry: string; + try { + workflowCjsEntry = require.resolve('workflow', { + paths: [this.config.workingDir], + }); + } catch { + return []; + } + + const workflowDistDir = dirname(workflowCjsEntry); + const workflowStdlibPath = this.normalizeDiscoveredFilePath( + join(workflowDistDir, 'stdlib.js') + ); + + const candidatePaths = [workflowStdlibPath]; + const existingFiles = await Promise.all( + candidatePaths.map(async (filePath) => { + try { + const fileStats = await stat(filePath); + return fileStats.isFile() ? filePath : null; + } catch { + return null; + } + }) + ); + + return existingFiles.filter((filePath): filePath is string => + Boolean(filePath) + ); + } + + private areFileSetsEqual(a: Set<string>, b: Set<string>): boolean { + if (a.size !== b.size) { + return false; + } + + for (const filePath of a) { + if (!b.has(filePath)) { + return false; + } + } + + return true; + } + + private async reconcileDiscoveredEntries({ + workflowCandidates, + stepCandidates, + serdeCandidates, + validatePatterns, + }: { + workflowCandidates: Iterable<string>; + stepCandidates: Iterable<string>; + serdeCandidates?: Iterable<string>; + validatePatterns: boolean; + }): Promise<{ + workflowFiles: Set<string>; + stepFiles: Set<string>; + serdeFiles: Set<string>; + }> { + const candidatesByFile = new Map< + string, + { + hasWorkflowCandidate: boolean; + hasStepCandidate: boolean; + hasSerdeCandidate: boolean; + } + >(); + + for (const filePath of workflowCandidates) { + const normalizedPath = this.normalizeDiscoveredFilePath(filePath); + const existing = candidatesByFile.get(normalizedPath); + if (existing) { + existing.hasWorkflowCandidate = true; + } else { + candidatesByFile.set(normalizedPath, { + hasWorkflowCandidate: true, + hasStepCandidate: false, + hasSerdeCandidate: false, + }); + } + } + + for (const filePath of stepCandidates) { + const normalizedPath = this.normalizeDiscoveredFilePath(filePath); + const existing = 
candidatesByFile.get(normalizedPath); + if (existing) { + existing.hasStepCandidate = true; + } else { + candidatesByFile.set(normalizedPath, { + hasWorkflowCandidate: false, + hasStepCandidate: true, + hasSerdeCandidate: false, + }); + } + } + + if (serdeCandidates) { + for (const filePath of serdeCandidates) { + const normalizedPath = this.normalizeDiscoveredFilePath(filePath); + const existing = candidatesByFile.get(normalizedPath); + if (existing) { + existing.hasSerdeCandidate = true; + } else { + candidatesByFile.set(normalizedPath, { + hasWorkflowCandidate: false, + hasStepCandidate: false, + hasSerdeCandidate: true, + }); + } + } + } + + const fileEntries = Array.from(candidatesByFile.entries()).sort( + ([a], [b]) => a.localeCompare(b) + ); + const validatedEntries = await Promise.all( + fileEntries.map(async ([filePath, candidates]) => { + try { + const fileStats = await stat(filePath); + if (!fileStats.isFile()) { + return null; + } + + if (!validatePatterns) { + const isSdkFile = isWorkflowSdkFile(filePath); + return { + filePath, + hasUseWorkflow: candidates.hasWorkflowCandidate, + hasUseStep: candidates.hasStepCandidate, + hasSerde: candidates.hasSerdeCandidate && !isSdkFile, + }; + } + + const source = await readFile(filePath, 'utf-8'); + const patterns = detectWorkflowPatterns(source); + const isSdkFile = isWorkflowSdkFile(filePath); + return { + filePath, + hasUseWorkflow: patterns.hasUseWorkflow, + hasUseStep: patterns.hasUseStep, + hasSerde: patterns.hasSerde && !isSdkFile, + }; + } catch { + return null; + } + }) + ); + + const workflowFiles = new Set<string>(); + const stepFiles = new Set<string>(); + const serdeFiles = new Set<string>(); + for (const entry of validatedEntries) { + if (!entry) { + continue; + } + if (entry.hasUseWorkflow) { + workflowFiles.add(entry.filePath); + } + if (entry.hasUseStep) { + stepFiles.add(entry.filePath); + } + if (entry.hasSerde) { + serdeFiles.add(entry.filePath); + } + } + + return { workflowFiles, stepFiles, serdeFiles }; + } + + 
private async validateDiscoveredEntryFiles(): Promise<void> { + const { workflowFiles, stepFiles, serdeFiles } = + await this.reconcileDiscoveredEntries({ + workflowCandidates: this.discoveredWorkflowFiles, + stepCandidates: this.discoveredStepFiles, + serdeCandidates: this.discoveredSerdeFiles, + validatePatterns: true, + }); + const workflowsChanged = !this.areFileSetsEqual( + this.discoveredWorkflowFiles, + workflowFiles + ); + const stepsChanged = !this.areFileSetsEqual( + this.discoveredStepFiles, + stepFiles + ); + const serdeChanged = !this.areFileSetsEqual( + this.discoveredSerdeFiles, + serdeFiles + ); + + if (workflowsChanged || stepsChanged || serdeChanged) { + this.discoveredWorkflowFiles.clear(); + this.discoveredStepFiles.clear(); + this.discoveredSerdeFiles.clear(); + for (const filePath of workflowFiles) { + this.discoveredWorkflowFiles.add(filePath); + } + for (const filePath of stepFiles) { + this.discoveredStepFiles.add(filePath); + } + for (const filePath of serdeFiles) { + this.discoveredSerdeFiles.add(filePath); + } + } + + if (workflowsChanged || stepsChanged) { + this.scheduleWorkflowsCacheWrite(); + } + } + + private async buildDiscoveredFiles( + inputFiles: string[], + implicitStepFiles: string[] + ) { + const outputDir = await this.findAppDirectory(); + const workflowGeneratedDir = join(outputDir, '.well-known/workflow/v1'); + const cacheDir = join(this.config.workingDir, this.getDistDir(), 'cache'); + await mkdir(cacheDir, { recursive: true }); + const manifestBuildDir = join(cacheDir, 'workflow-generated-manifest'); + const tempRouteFileName = 'route.js.temp'; + const discoveredStepFiles = Array.from( + new Set([...this.discoveredStepFiles, ...implicitStepFiles]) + ).sort(); + const discoveredWorkflowFiles = Array.from( + this.discoveredWorkflowFiles + ).sort(); + const trackedSerdeFiles = await this.collectTrackedSerdeFiles(); + const discoveredSerdeFiles = Array.from( + new Set([...this.discoveredSerdeFiles, ...trackedSerdeFiles]) + 
).sort(); + const discoveredEntries = { + discoveredSteps: discoveredStepFiles, + discoveredWorkflows: discoveredWorkflowFiles, + discoveredSerdeFiles, + }; + + // Ensure output directories exist + await mkdir(workflowGeneratedDir, { recursive: true }); + + await this.writeFileIfChanged( + join(workflowGeneratedDir, '.gitignore'), + '*' + ); + + const tsconfigPath = await this.findTsConfigPath(); + + const options = { + inputFiles, + workflowGeneratedDir, + tsconfigPath, + routeFileName: tempRouteFileName, + discoveredEntries, + }; + + const { manifest: stepsManifest } = + await this.buildStepsFunction(options); + const workflowsBundle = await this.buildWorkflowsFunction(options); + await this.buildWebhookRoute({ + workflowGeneratedDir, + routeFileName: tempRouteFileName, + }); + await this.refreshTrackedDependencyFiles( + workflowGeneratedDir, + tempRouteFileName + ); + + // Merge manifests from both bundles + const manifest = { + steps: { ...stepsManifest.steps, ...workflowsBundle?.manifest?.steps }, + workflows: { + ...stepsManifest.workflows, + ...workflowsBundle?.manifest?.workflows, + }, + classes: { + ...stepsManifest.classes, + ...workflowsBundle?.manifest?.classes, + }, + }; + + const manifestFilePath = join(workflowGeneratedDir, 'manifest.json'); + const manifestBuildPath = join(manifestBuildDir, 'manifest.json'); + const workflowBundlePath = join( + workflowGeneratedDir, + `flow/${tempRouteFileName}` + ); + const manifestJson = await this.createManifest({ + workflowBundlePath, + manifestDir: manifestBuildDir, + manifest, + }); + await this.rewriteJsonFileWithStableKeyOrder(manifestBuildPath); + await this.copyFileIfChanged(manifestBuildPath, manifestFilePath); + + await this.writeFunctionsConfig(outputDir); + + await this.copyFileIfChanged( + join(workflowGeneratedDir, `flow/${tempRouteFileName}`), + join(workflowGeneratedDir, 'flow/route.js') + ); + await this.copyFileIfChanged( + join(workflowGeneratedDir, `step/${tempRouteFileName}`), + 
join(workflowGeneratedDir, 'step/route.js') + ); + await this.copyFileIfChanged( + join(workflowGeneratedDir, `webhook/[token]/${tempRouteFileName}`), + join(workflowGeneratedDir, 'webhook/[token]/route.js') + ); + + // Expose manifest as a static file when WORKFLOW_PUBLIC_MANIFEST=1. + // Next.js serves files from public/ at the root URL. + if (this.shouldExposePublicManifest && manifestJson) { + const publicManifestDir = join( + this.config.workingDir, + 'public/.well-known/workflow/v1' + ); + await mkdir(publicManifestDir, { recursive: true }); + await this.copyFileIfChanged( + manifestFilePath, + join(publicManifestDir, 'manifest.json') + ); + } + + // Notify deferred entry loaders waiting on route.js stubs. + this.socketIO?.emit('build-complete'); + } + + private async createDiscoverySocketServer(): Promise<void> { + if (this.socketIO || process.env.WORKFLOW_SOCKET_PORT) { + return; + } + + const config: SocketServerConfig = { + isDevServer: Boolean(this.config.watch), + onFileDiscovered: ( + filePath: string, + hasWorkflow: boolean, + hasStep: boolean, + hasSerde: boolean + ) => { + const normalizedFilePath = this.normalizeDiscoveredFilePath(filePath); + let hasCacheTrackingChange = false; + + if (hasWorkflow) { + if (!this.discoveredWorkflowFiles.has(normalizedFilePath)) { + this.discoveredWorkflowFiles.add(normalizedFilePath); + hasCacheTrackingChange = true; + } + } else { + const wasDeleted = + this.discoveredWorkflowFiles.delete(normalizedFilePath); + hasCacheTrackingChange = wasDeleted || hasCacheTrackingChange; + } + + if (hasStep) { + if (!this.discoveredStepFiles.has(normalizedFilePath)) { + this.discoveredStepFiles.add(normalizedFilePath); + hasCacheTrackingChange = true; + } + } else { + const wasDeleted = + this.discoveredStepFiles.delete(normalizedFilePath); + hasCacheTrackingChange = wasDeleted || hasCacheTrackingChange; + } + + if (hasSerde) { + this.discoveredSerdeFiles.add(normalizedFilePath); + } else { + 
this.discoveredSerdeFiles.delete(normalizedFilePath); + } + + if (hasCacheTrackingChange) { + this.scheduleWorkflowsCacheWrite(); + } + }, + onTriggerBuild: () => { + // Deferred builder builds via onBeforeDeferredEntries callback. + }, + }; + + this.socketIO = await createSocketServer(config); + } + + private async initializeDiscoveryState(): Promise<void> { + if (this.cacheInitialized) { + return; + } + + await this.loadWorkflowsCache(); + this.cacheInitialized = true; + } + + private getDistDir(): string { + return (this.config as { distDir?: string }).distDir || '.next'; + } + + private getWorkflowsCacheFilePath(): string { + return join( + this.config.workingDir, + this.getDistDir(), + 'cache', + 'workflows.json' + ); + } + + private normalizeDiscoveredFilePath(filePath: string): string { + return isAbsolute(filePath) + ? filePath + : resolve(this.config.workingDir, filePath); + } + + private async createDeferredBuildSignature( + inputFiles: string[] + ): Promise<string> { + const normalizedFiles = Array.from( + new Set([ + ...inputFiles.map((filePath) => + this.normalizeDiscoveredFilePath(filePath) + ), + ...this.trackedDependencyFiles, + ]) + ).sort(); + + const signatureParts = await Promise.all( + normalizedFiles.map(async (filePath) => { + try { + const fileStats = await stat(filePath); + return `${filePath}:${fileStats.size}:${Math.trunc(fileStats.mtimeMs)}`; + } catch { + return `${filePath}:missing`; + } + }) + ); + + const signatureHash = createHash('sha256'); + for (const signaturePart of signatureParts) { + signatureHash.update(signaturePart); + signatureHash.update('\n'); + } + + return signatureHash.digest('hex'); + } + + private async collectTrackedSerdeFiles(): Promise<string[]> { + if (this.trackedDependencyFiles.size === 0) { + return []; + } + + const { serdeFiles } = await this.reconcileDiscoveredEntries({ + workflowCandidates: [], + stepCandidates: [], + serdeCandidates: this.trackedDependencyFiles, + validatePatterns: true, + }); + + return 
Array.from(serdeFiles); + } + + private async refreshTrackedDependencyFiles( + workflowGeneratedDir: string, + routeFileName: string + ): Promise { + const bundleFiles = [ + join(workflowGeneratedDir, `step/${routeFileName}`), + join(workflowGeneratedDir, `flow/${routeFileName}`), + ]; + const trackedFiles = new Set(); + + for (const bundleFile of bundleFiles) { + const bundleSources = await this.extractBundleSourceFiles(bundleFile); + for (const sourceFile of bundleSources) { + trackedFiles.add(sourceFile); + } + } + + if (trackedFiles.size > 0) { + this.trackedDependencyFiles = trackedFiles; + } + } + + private async extractBundleSourceFiles( + bundleFilePath: string + ): Promise { + let bundleContents: string; + try { + bundleContents = await readFile(bundleFilePath, 'utf-8'); + } catch { + return []; + } + + const baseDirectory = dirname(bundleFilePath); + const localSourceFiles = new Set(); + const sourceMapMatches = bundleContents.matchAll( + /\/\/# sourceMappingURL=data:application\/json[^,]*;base64,([A-Za-z0-9+/=]+)/g + ); + + for (const match of sourceMapMatches) { + const base64Value = match[1]; + if (!base64Value) { + continue; + } + + let sourceMap: { sourceRoot?: unknown; sources?: unknown }; + try { + sourceMap = JSON.parse( + Buffer.from(base64Value, 'base64').toString('utf-8') + ) as { sourceRoot?: unknown; sources?: unknown }; + } catch { + continue; + } + + const sourceRoot = + typeof sourceMap.sourceRoot === 'string' ? sourceMap.sourceRoot : ''; + const sources = Array.isArray(sourceMap.sources) + ? 
sourceMap.sources.filter( + (source): source is string => typeof source === 'string' + ) + : []; + + for (const source of sources) { + if (source.startsWith('webpack://') || source.startsWith('<')) { + continue; + } + + let resolvedSourcePath: string; + if (source.startsWith('file://')) { + try { + resolvedSourcePath = decodeURIComponent(new URL(source).pathname); + } catch { + continue; + } + } else if (isAbsolute(source)) { + resolvedSourcePath = source; + } else { + resolvedSourcePath = resolve(baseDirectory, sourceRoot, source); + } + + const normalizedSourcePath = + this.normalizeDiscoveredFilePath(resolvedSourcePath); + const normalizedSourcePathForCheck = normalizedSourcePath.replace( + /\\/g, + '/' + ); + if ( + normalizedSourcePathForCheck.includes('/.well-known/workflow/') || + normalizedSourcePathForCheck.includes('/node_modules/') || + normalizedSourcePathForCheck.includes('/.pnpm/') || + normalizedSourcePathForCheck.includes('/.next/') || + normalizedSourcePathForCheck.endsWith('/virtual-entry.js') + ) { + continue; + } + + localSourceFiles.add(normalizedSourcePath); + } + } + + return Array.from(localSourceFiles); + } + + private scheduleWorkflowsCacheWrite(): void { + if (this.cacheWriteTimer) { + clearTimeout(this.cacheWriteTimer); + } + + this.cacheWriteTimer = setTimeout(() => { + this.cacheWriteTimer = null; + void this.writeWorkflowsCache().catch((error) => { + console.warn('Failed to write workflow discovery cache', error); + }); + }, 50); + } + + private async readWorkflowsCache(): Promise<{ + workflowFiles: string[]; + stepFiles: string[]; + } | null> { + const cacheFilePath = this.getWorkflowsCacheFilePath(); + + try { + const cacheContents = await readFile(cacheFilePath, 'utf-8'); + const parsed = JSON.parse(cacheContents) as { + workflowFiles?: unknown; + stepFiles?: unknown; + }; + + const workflowFiles = Array.isArray(parsed.workflowFiles) + ? 
parsed.workflowFiles.filter( + (item): item is string => typeof item === 'string' + ) + : []; + const stepFiles = Array.isArray(parsed.stepFiles) + ? parsed.stepFiles.filter( + (item): item is string => typeof item === 'string' + ) + : []; + + return { workflowFiles, stepFiles }; + } catch { + return null; + } + } + + private async loadWorkflowsCache(): Promise { + const cachedData = await this.readWorkflowsCache(); + if (!cachedData) { + return; + } + const { workflowFiles, stepFiles, serdeFiles } = + await this.reconcileDiscoveredEntries({ + workflowCandidates: cachedData.workflowFiles, + stepCandidates: cachedData.stepFiles, + serdeCandidates: this.discoveredSerdeFiles, + validatePatterns: true, + }); + + this.discoveredWorkflowFiles.clear(); + this.discoveredStepFiles.clear(); + this.discoveredSerdeFiles.clear(); + for (const filePath of workflowFiles) { + this.discoveredWorkflowFiles.add(filePath); + } + for (const filePath of stepFiles) { + this.discoveredStepFiles.add(filePath); + } + for (const filePath of serdeFiles) { + this.discoveredSerdeFiles.add(filePath); + } + } + + private async writeWorkflowsCache(): Promise { + const cacheFilePath = this.getWorkflowsCacheFilePath(); + const cacheDir = join(this.config.workingDir, this.getDistDir(), 'cache'); + await mkdir(cacheDir, { recursive: true }); + + const cacheData = { + workflowFiles: Array.from(this.discoveredWorkflowFiles).sort(), + stepFiles: Array.from(this.discoveredStepFiles).sort(), + }; + + await writeFile(cacheFilePath, JSON.stringify(cacheData, null, 2)); + } + + private async writeStubFiles(outputDir: string): Promise { + // Turbopack currently has a worker-concurrency limitation for pending + // virtual entries. Warn if parallelism is too low to reliably discover. + const parallelismCount = os.availableParallelism(); + if (process.env.TURBOPACK && parallelismCount < 4) { + console.warn( + `Available parallelism of ${parallelismCount} is less than needed 4. 
This can cause workflows/steps to fail to discover properly in turbopack` + ); + } + + const routeStubContent = [ + `// ${ROUTE_STUB_FILE_MARKER}`, + 'export const __workflowRouteStub = true;', + ].join('\n'); + const workflowGeneratedDir = join(outputDir, '.well-known/workflow/v1'); + + await mkdir(join(workflowGeneratedDir, 'flow'), { recursive: true }); + await mkdir(join(workflowGeneratedDir, 'step'), { recursive: true }); + await mkdir(join(workflowGeneratedDir, 'webhook/[token]'), { + recursive: true, + }); + + await this.writeFileIfChanged( + join(workflowGeneratedDir, '.gitignore'), + '*' + ); + + // route.js stubs are replaced by generated route.js output once discovery + // finishes and a deferred build completes. + await this.writeFileIfChanged( + join(workflowGeneratedDir, 'flow/route.js'), + routeStubContent + ); + await this.writeFileIfChanged( + join(workflowGeneratedDir, 'step/route.js'), + routeStubContent + ); + await this.writeFileIfChanged( + join(workflowGeneratedDir, 'webhook/[token]/route.js'), + routeStubContent + ); + } + + protected async getInputFiles(): Promise { + const inputFiles = await super.getInputFiles(); + return inputFiles.filter((item) => { + // Match App Router entrypoints: route.ts, page.ts, layout.ts in app/ or src/app/ directories + // Matches: /app/page.ts, /app/dashboard/page.ts, /src/app/route.ts, etc. 
+ if ( + item.match( + /(^|.*[/\\])(app|src[/\\]app)([/\\](route|page|layout)\.|[/\\].*[/\\](route|page|layout)\.)/ + ) + ) { + return true; + } + // Match Pages Router entrypoints: files in pages/ or src/pages/ + if (item.match(/[/\\](pages|src[/\\]pages)[/\\]/)) { + return true; + } + return false; + }); + } + + private async writeFunctionsConfig(outputDir: string) { + // we don't run this in development mode as it's not needed + if (process.env.NODE_ENV === 'development') { + return; + } + const generatedConfig = { + version: '0', + steps: { + experimentalTriggers: [STEP_QUEUE_TRIGGER], + }, + workflows: { + experimentalTriggers: [WORKFLOW_QUEUE_TRIGGER], + }, + }; + + // We write this file to the generated directory for + // the Next.js builder to consume + await this.writeFileIfChanged( + join(outputDir, '.well-known/workflow/v1/config.json'), + JSON.stringify(generatedConfig, null, 2) + ); + } + + private async writeFileIfChanged( + filePath: string, + contents: string | Buffer + ): Promise { + const nextBuffer = Buffer.isBuffer(contents) + ? contents + : Buffer.from(contents); + + try { + const currentBuffer = await readFile(filePath); + if (currentBuffer.equals(nextBuffer)) { + return false; + } + } catch { + // File does not exist yet or cannot be read; write a fresh copy. 
+ } + + await mkdir(dirname(filePath), { recursive: true }); + await writeFile(filePath, nextBuffer); + return true; + } + + private async copyFileIfChanged( + sourcePath: string, + destinationPath: string + ): Promise { + const sourceContents = await readFile(sourcePath); + return this.writeFileIfChanged(destinationPath, sourceContents); + } + + private sortJsonValue(value: unknown): unknown { + if (Array.isArray(value)) { + return value.map((item) => this.sortJsonValue(item)); + } + if (value && typeof value === 'object') { + const sortedEntries = Object.entries(value as Record) + .sort(([a], [b]) => a.localeCompare(b)) + .map(([key, entryValue]) => [key, this.sortJsonValue(entryValue)]); + return Object.fromEntries(sortedEntries); + } + return value; + } + + private async rewriteJsonFileWithStableKeyOrder( + filePath: string + ): Promise { + try { + const contents = await readFile(filePath, 'utf-8'); + const parsed = JSON.parse(contents) as unknown; + const normalized = this.sortJsonValue(parsed); + await this.writeFileIfChanged( + filePath, + `${JSON.stringify(normalized, null, 2)}\n` + ); + } catch { + // Manifest may not exist (e.g. manifest generation failed); ignore. + } + } + + private async buildStepsFunction({ + inputFiles, + workflowGeneratedDir, + tsconfigPath, + routeFileName = 'route.js', + discoveredEntries, + }: { + inputFiles: string[]; + workflowGeneratedDir: string; + tsconfigPath?: string; + routeFileName?: string; + discoveredEntries?: { + discoveredSteps: string[]; + discoveredWorkflows: string[]; + discoveredSerdeFiles: string[]; + }; + }) { + // Create steps bundle + const stepsRouteDir = join(workflowGeneratedDir, 'step'); + await mkdir(stepsRouteDir, { recursive: true }); + return await this.createStepsBundle({ + // If any dynamic requires are used when bundling with ESM + // esbuild will create a too dynamic wrapper around require + // which turbopack/webpack fail to analyze. 
If we externalize + // correctly this shouldn't be an issue although we might want + // to use cjs as alternative to avoid + format: 'esm', + inputFiles, + outfile: join(stepsRouteDir, routeFileName), + externalizeNonSteps: true, + tsconfigPath, + discoveredEntries, + }); + } + + private async buildWorkflowsFunction({ + inputFiles, + workflowGeneratedDir, + tsconfigPath, + routeFileName = 'route.js', + discoveredEntries, + }: { + inputFiles: string[]; + workflowGeneratedDir: string; + tsconfigPath?: string; + routeFileName?: string; + discoveredEntries?: { + discoveredSteps: string[]; + discoveredWorkflows: string[]; + discoveredSerdeFiles: string[]; + }; + }) { + const workflowsRouteDir = join(workflowGeneratedDir, 'flow'); + await mkdir(workflowsRouteDir, { recursive: true }); + return await this.createWorkflowsBundle({ + format: 'esm', + outfile: join(workflowsRouteDir, routeFileName), + bundleFinalOutput: false, + inputFiles, + tsconfigPath, + discoveredEntries, + }); + } + + private async buildWebhookRoute({ + workflowGeneratedDir, + routeFileName = 'route.js', + }: { + workflowGeneratedDir: string; + routeFileName?: string; + }): Promise { + const webhookRouteFile = join( + workflowGeneratedDir, + `webhook/[token]/${routeFileName}` + ); + await this.createWebhookBundle({ + outfile: webhookRouteFile, + bundle: false, // Next.js doesn't need bundling + }); + } + + private async findAppDirectory(): Promise { + const appDir = resolve(this.config.workingDir, 'app'); + const srcAppDir = resolve(this.config.workingDir, 'src/app'); + const pagesDir = resolve(this.config.workingDir, 'pages'); + const srcPagesDir = resolve(this.config.workingDir, 'src/pages'); + + // Helper to check if a path exists and is a directory + const isDirectory = async (path: string): Promise => { + try { + await access(path, constants.F_OK); + const stats = await stat(path); + if (!stats.isDirectory()) { + throw new Error(`Path exists but is not a directory: ${path}`); + } + return true; + } 
catch (e) { + if (e instanceof Error && e.message.includes('not a directory')) { + throw e; + } + return false; + } + }; + + // Check if app directory exists + if (await isDirectory(appDir)) { + return appDir; + } + + // Check if src/app directory exists + if (await isDirectory(srcAppDir)) { + return srcAppDir; + } + + // If no app directory exists, check for pages directory and create app next to it + if (await isDirectory(pagesDir)) { + // Create app directory next to pages directory + await mkdir(appDir, { recursive: true }); + return appDir; + } + + if (await isDirectory(srcPagesDir)) { + // Create src/app directory next to src/pages directory + await mkdir(srcAppDir, { recursive: true }); + return srcAppDir; + } + + throw new Error( + 'Could not find Next.js app or pages directory. Expected one of: "app", "src/app", "pages", or "src/pages" to exist.' + ); + } + } + + CachedNextBuilderDeferred = NextDeferredBuilder; + return NextDeferredBuilder; +} diff --git a/packages/next/src/builder-eager.ts b/packages/next/src/builder-eager.ts new file mode 100644 index 0000000000..c46eb03e62 --- /dev/null +++ b/packages/next/src/builder-eager.ts @@ -0,0 +1,549 @@ +import { constants } from 'node:fs'; +import { access, copyFile, mkdir, stat, writeFile } from 'node:fs/promises'; +import { extname, join, resolve } from 'node:path'; +import Watchpack from 'watchpack'; + +let CachedNextBuilderEager: any; + +// Create the eager Next builder dynamically by extending the ESM BaseBuilder. +// Exported as getNextBuilderEager() to allow CommonJS modules to import from +// the ESM @workflow/builders package via dynamic import at runtime. 
+export async function getNextBuilderEager() { + if (CachedNextBuilderEager) { + return CachedNextBuilderEager; + } + + const { + BaseBuilder: BaseBuilderClass, + STEP_QUEUE_TRIGGER, + WORKFLOW_QUEUE_TRIGGER, + // biome-ignore lint/security/noGlobalEval: Need to use eval here to avoid TypeScript from transpiling the import statement into `require()` + } = (await eval( + 'import("@workflow/builders")' + )) as typeof import('@workflow/builders'); + + class NextBuilder extends BaseBuilderClass { + async build() { + const outputDir = await this.findAppDirectory(); + const workflowGeneratedDir = join(outputDir, '.well-known/workflow/v1'); + + // Ensure output directories exist + await mkdir(workflowGeneratedDir, { recursive: true }); + // ignore the generated assets + + await writeFile(join(workflowGeneratedDir, '.gitignore'), '*'); + + const inputFiles = await this.getInputFiles(); + const tsconfigPath = await this.findTsConfigPath(); + + const options = { + inputFiles, + workflowGeneratedDir, + tsconfigPath, + }; + + const { manifest: stepsManifest, context: stepsBuildContext } = + await this.buildStepsFunction(options); + const workflowsBundle = await this.buildWorkflowsFunction(options); + await this.buildWebhookRoute({ workflowGeneratedDir }); + + // Merge manifests from both bundles + const manifest = { + steps: { ...stepsManifest.steps, ...workflowsBundle?.manifest?.steps }, + workflows: { + ...stepsManifest.workflows, + ...workflowsBundle?.manifest?.workflows, + }, + classes: { + ...stepsManifest.classes, + ...workflowsBundle?.manifest?.classes, + }, + }; + + // Write unified manifest to workflow generated directory + const workflowBundlePath = join(workflowGeneratedDir, 'flow/route.js'); + const manifestJson = await this.createManifest({ + workflowBundlePath, + manifestDir: workflowGeneratedDir, + manifest, + }); + + // Expose manifest as a static file when WORKFLOW_PUBLIC_MANIFEST=1. + // Next.js serves files from public/ at the root URL. 
+ if (this.shouldExposePublicManifest && manifestJson) { + const publicManifestDir = join( + this.config.workingDir, + 'public/.well-known/workflow/v1' + ); + await mkdir(publicManifestDir, { recursive: true }); + await copyFile( + join(workflowGeneratedDir, 'manifest.json'), + join(publicManifestDir, 'manifest.json') + ); + } + + await this.writeFunctionsConfig(outputDir); + + if (this.config.watch) { + if (!stepsBuildContext) { + throw new Error( + 'Invariant: expected steps build context in watch mode' + ); + } + if ( + !workflowsBundle?.interimBundleCtx || + !workflowsBundle?.bundleFinal + ) { + throw new Error('Invariant: expected workflows bundle in watch mode'); + } + + let stepsCtx = stepsBuildContext; + // These are safe to assert as non-null because we checked above + let workflowsCtx = { + interimBundleCtx: workflowsBundle.interimBundleCtx!, + bundleFinal: workflowsBundle.bundleFinal!, + }; + + const normalizePath = (pathname: string) => + pathname.replace(/\\/g, '/'); + const knownFiles = new Set(); + type WatchpackTimeInfoEntry = { + safeTime: number; + timestamp?: number; + }; + let previousTimeInfo = new Map(); + + const watchableExtensions = new Set([ + '.js', + '.jsx', + '.ts', + '.tsx', + '.mts', + '.cts', + '.cjs', + '.mjs', + ]); + const ignoredPathFragments = [ + '/.git/', + '/node_modules/', + '/.next/', + '/.turbo/', + '/.vercel/', + '/dist/', + '/build/', + '/out/', + '/.cache/', + '/.yarn/', + '/.pnpm-store/', + '/.parcel-cache/', + '/.well-known/workflow/', + ]; + const normalizedGeneratedDir = workflowGeneratedDir.replace(/\\/g, '/'); + ignoredPathFragments.push(normalizedGeneratedDir); + + // There is a node.js bug on MacOS which causes closing file watchers to be really slow. + // This limits the number of watchers to mitigate the issue. + // https://github.com/nodejs/node/issues/29949 + process.env.WATCHPACK_WATCHER_LIMIT = + process.platform === 'darwin' ? 
'20' : undefined; + + const watcher = new Watchpack({ + // Watchpack default is 200ms which adds 200ms of dead time on bootup. + aggregateTimeout: 5, + ignored: (pathname: string) => { + const normalizedPath = pathname.replace(/\\/g, '/'); + const extension = extname(normalizedPath); + if (extension && !watchableExtensions.has(extension)) { + return true; + } + if (normalizedPath.startsWith(normalizedGeneratedDir)) { + return true; + } + for (const fragment of ignoredPathFragments) { + if (normalizedPath.includes(fragment)) { + return true; + } + } + return false; + }, + }); + + const readTimeInfoEntries = () => { + const rawEntries = watcher.getTimeInfoEntries() as Map< + string, + WatchpackTimeInfoEntry + >; + const normalizedEntries = new Map(); + for (const [path, info] of rawEntries) { + normalizedEntries.set(normalizePath(path), info); + } + return normalizedEntries; + }; + + let rebuildQueue = Promise.resolve(); + + const enqueue = (task: () => Promise) => { + rebuildQueue = rebuildQueue.then(task).catch((error) => { + console.error('Failed to process file change', error); + }); + return rebuildQueue; + }; + + const fullRebuild = async () => { + const newInputFiles = await this.getInputFiles(); + options.inputFiles = newInputFiles; + + await stepsCtx.dispose(); + const { context: newStepsCtx } = + await this.buildStepsFunction(options); + if (!newStepsCtx) { + throw new Error( + 'Invariant: expected steps build context after rebuild' + ); + } + stepsCtx = newStepsCtx; + + await workflowsCtx.interimBundleCtx.dispose(); + const newWorkflowsCtx = await this.buildWorkflowsFunction(options); + if ( + !newWorkflowsCtx?.interimBundleCtx || + !newWorkflowsCtx?.bundleFinal + ) { + throw new Error( + 'Invariant: expected workflows bundle context after rebuild' + ); + } + workflowsCtx = { + interimBundleCtx: newWorkflowsCtx.interimBundleCtx, + bundleFinal: newWorkflowsCtx.bundleFinal, + }; + }; + + const logBuildMessages = ( + result: { + errors?: 
import('esbuild').Message[]; + warnings?: import('esbuild').Message[]; + }, + label: string + ) => { + const logByType = ( + messages: import('esbuild').Message[] | undefined, + method: 'error' | 'warn' + ) => { + if (!messages || messages.length === 0) { + return; + } + const descriptor = method === 'error' ? 'errors' : 'warnings'; + console[method](`${descriptor} while rebuilding ${label}`); + for (const message of messages) { + console[method](message); + } + }; + + logByType(result.errors, 'error'); + logByType(result.warnings, 'warn'); + }; + + const rebuildExistingFiles = async () => { + const rebuiltStepStart = Date.now(); + const stepsResult = await stepsCtx.rebuild(); + logBuildMessages(stepsResult, 'steps bundle'); + console.log( + 'Rebuilt steps bundle', + `${Date.now() - rebuiltStepStart}ms` + ); + + const rebuiltWorkflowStart = Date.now(); + const workflowResult = await workflowsCtx.interimBundleCtx.rebuild(); + logBuildMessages(workflowResult, 'workflows bundle'); + + if ( + !workflowResult.outputFiles || + workflowResult.outputFiles.length === 0 + ) { + console.error( + 'No output generated while rebuilding workflows bundle' + ); + return; + } + await workflowsCtx.bundleFinal(workflowResult.outputFiles[0].text); + console.log( + 'Rebuilt workflow bundle', + `${Date.now() - rebuiltWorkflowStart}ms` + ); + }; + + const isWatchableFile = (path: string) => + watchableExtensions.has(extname(path)); + + const getComparableTimestamp = (entry: WatchpackTimeInfoEntry) => + entry.timestamp ?? 
entry.safeTime; + + const findRemovedFiles = ( + currentEntries: Map, + previousEntries: Map + ) => { + const removed: string[] = []; + for (const path of previousEntries.keys()) { + if (!currentEntries.has(path) && isWatchableFile(path)) { + removed.push(path); + } + } + return removed; + }; + + const findAddedAndModifiedFiles = ( + currentEntries: Map, + previousEntries: Map + ) => { + const added: string[] = []; + const modified: string[] = []; + + for (const [path, info] of currentEntries) { + if (!isWatchableFile(path)) { + continue; + } + + const previous = previousEntries.get(path); + if (!previous) { + added.push(path); + continue; + } + + if ( + getComparableTimestamp(info) !== getComparableTimestamp(previous) + ) { + modified.push(path); + } + } + + return { added, modified }; + }; + + const determineFileChanges = ( + currentEntries: Map, + previousEntries: Map + ) => { + const removedFiles = findRemovedFiles( + currentEntries, + previousEntries + ); + const { added, modified } = findAddedAndModifiedFiles( + currentEntries, + previousEntries + ); + + return { + addedFiles: added, + modifiedFiles: modified, + removedFiles, + }; + }; + + let isInitial = true; + + watcher.on('aggregated', () => { + const currentEntries = readTimeInfoEntries(); + const { addedFiles, modifiedFiles, removedFiles } = + determineFileChanges(currentEntries, previousTimeInfo); + + previousTimeInfo = currentEntries; + + if (isInitial) { + isInitial = false; + return; + } + + if ( + addedFiles.length === 0 && + modifiedFiles.length === 0 && + removedFiles.length === 0 + ) { + return; + } + + for (const removal of removedFiles) { + knownFiles.delete(removal); + } + for (const added of addedFiles) { + knownFiles.add(added); + } + + enqueue(async () => { + if (addedFiles.length > 0 || removedFiles.length > 0) { + await fullRebuild(); + return; + } + + if (modifiedFiles.length > 0) { + await rebuildExistingFiles(); + } + }); + }); + + watcher.watch({ + directories: 
[this.config.workingDir], + startTime: 0, + }); + } + } + + protected async getInputFiles(): Promise { + const inputFiles = await super.getInputFiles(); + return inputFiles.filter((item) => { + // Match App Router entrypoints: route.ts, page.ts, layout.ts in app/ or src/app/ directories + // Matches: /app/page.ts, /app/dashboard/page.ts, /src/app/route.ts, etc. + if ( + item.match( + /(^|.*[/\\])(app|src[/\\]app)([/\\](route|page|layout)\.|[/\\].*[/\\](route|page|layout)\.)/ + ) + ) { + return true; + } + // Match Pages Router entrypoints: files in pages/ or src/pages/ + if (item.match(/[/\\](pages|src[/\\]pages)[/\\]/)) { + return true; + } + return false; + }); + } + + private async writeFunctionsConfig(outputDir: string) { + // we don't run this in development mode as it's not needed + if (process.env.NODE_ENV === 'development') { + return; + } + const generatedConfig = { + version: '0', + steps: { + experimentalTriggers: [STEP_QUEUE_TRIGGER], + }, + workflows: { + experimentalTriggers: [WORKFLOW_QUEUE_TRIGGER], + }, + }; + + // We write this file to the generated directory for + // the Next.js builder to consume + await writeFile( + join(outputDir, '.well-known/workflow/v1/config.json'), + JSON.stringify(generatedConfig, null, 2) + ); + } + + private async buildStepsFunction({ + inputFiles, + workflowGeneratedDir, + tsconfigPath, + }: { + inputFiles: string[]; + workflowGeneratedDir: string; + tsconfigPath?: string; + }) { + // Create steps bundle + const stepsRouteDir = join(workflowGeneratedDir, 'step'); + await mkdir(stepsRouteDir, { recursive: true }); + return await this.createStepsBundle({ + // If any dynamic requires are used when bundling with ESM + // esbuild will create a too dynamic wrapper around require + // which turbopack/webpack fail to analyze. 
If we externalize + // correctly this shouldn't be an issue although we might want + // to use cjs as alternative to avoid + format: 'esm', + inputFiles, + outfile: join(stepsRouteDir, 'route.js'), + externalizeNonSteps: true, + tsconfigPath, + }); + } + + private async buildWorkflowsFunction({ + inputFiles, + workflowGeneratedDir, + tsconfigPath, + }: { + inputFiles: string[]; + workflowGeneratedDir: string; + tsconfigPath?: string; + }) { + const workflowsRouteDir = join(workflowGeneratedDir, 'flow'); + await mkdir(workflowsRouteDir, { recursive: true }); + return await this.createWorkflowsBundle({ + format: 'esm', + outfile: join(workflowsRouteDir, 'route.js'), + bundleFinalOutput: false, + inputFiles, + tsconfigPath, + }); + } + + private async buildWebhookRoute({ + workflowGeneratedDir, + }: { + workflowGeneratedDir: string; + }): Promise { + const webhookRouteFile = join( + workflowGeneratedDir, + 'webhook/[token]/route.js' + ); + await this.createWebhookBundle({ + outfile: webhookRouteFile, + bundle: false, // Next.js doesn't need bundling + }); + } + + private async findAppDirectory(): Promise { + const appDir = resolve(this.config.workingDir, 'app'); + const srcAppDir = resolve(this.config.workingDir, 'src/app'); + const pagesDir = resolve(this.config.workingDir, 'pages'); + const srcPagesDir = resolve(this.config.workingDir, 'src/pages'); + + // Helper to check if a path exists and is a directory + const isDirectory = async (path: string): Promise => { + try { + await access(path, constants.F_OK); + const stats = await stat(path); + if (!stats.isDirectory()) { + throw new Error(`Path exists but is not a directory: ${path}`); + } + return true; + } catch (e) { + if (e instanceof Error && e.message.includes('not a directory')) { + throw e; + } + return false; + } + }; + + // Check if app directory exists + if (await isDirectory(appDir)) { + return appDir; + } + + // Check if src/app directory exists + if (await isDirectory(srcAppDir)) { + return srcAppDir; 
+ } + + // If no app directory exists, check for pages directory and create app next to it + if (await isDirectory(pagesDir)) { + // Create app directory next to pages directory + await mkdir(appDir, { recursive: true }); + return appDir; + } + + if (await isDirectory(srcPagesDir)) { + // Create src/app directory next to src/pages directory + await mkdir(srcAppDir, { recursive: true }); + return srcAppDir; + } + + throw new Error( + 'Could not find Next.js app or pages directory. Expected one of: "app", "src/app", "pages", or "src/pages" to exist.' + ); + } + } + + CachedNextBuilderEager = NextBuilder; + return NextBuilder; +} diff --git a/packages/next/src/builder.ts b/packages/next/src/builder.ts index 02484c9995..6665bf7706 100644 --- a/packages/next/src/builder.ts +++ b/packages/next/src/builder.ts @@ -1,549 +1,23 @@ -import { constants } from 'node:fs'; -import { access, copyFile, mkdir, stat, writeFile } from 'node:fs/promises'; -import { extname, join, resolve } from 'node:path'; -import Watchpack from 'watchpack'; +import semver from 'semver'; +import { getNextBuilderDeferred } from './builder-deferred.js'; +import { getNextBuilderEager } from './builder-eager.js'; -let CachedNextBuilder: any; +export const DEFERRED_BUILDER_MIN_VERSION = '16.2.0-canary.30'; -// Create the NextBuilder class dynamically by extending the ESM BaseBuilder -// This is exported as getNextBuilder() to allow CommonJS modules to import -// from the ESM @workflow/builders package via dynamic import at runtime -export async function getNextBuilder() { - if (CachedNextBuilder) { - return CachedNextBuilder; - } - - const { - BaseBuilder: BaseBuilderClass, - STEP_QUEUE_TRIGGER, - WORKFLOW_QUEUE_TRIGGER, - // biome-ignore lint/security/noGlobalEval: Need to use eval here to avoid TypeScript from transpiling the import statement into `require()` - } = (await eval( - 'import("@workflow/builders")' - )) as typeof import('@workflow/builders'); - - class NextBuilder extends BaseBuilderClass { - 
async build() { - const outputDir = await this.findAppDirectory(); - const workflowGeneratedDir = join(outputDir, '.well-known/workflow/v1'); - - // Ensure output directories exist - await mkdir(workflowGeneratedDir, { recursive: true }); - // ignore the generated assets - - await writeFile(join(workflowGeneratedDir, '.gitignore'), '*'); - - const inputFiles = await this.getInputFiles(); - const tsconfigPath = await this.findTsConfigPath(); - - const options = { - inputFiles, - workflowGeneratedDir, - tsconfigPath, - }; - - const { manifest: stepsManifest, context: stepsBuildContext } = - await this.buildStepsFunction(options); - const workflowsBundle = await this.buildWorkflowsFunction(options); - await this.buildWebhookRoute({ workflowGeneratedDir }); - - // Merge manifests from both bundles - const manifest = { - steps: { ...stepsManifest.steps, ...workflowsBundle?.manifest?.steps }, - workflows: { - ...stepsManifest.workflows, - ...workflowsBundle?.manifest?.workflows, - }, - classes: { - ...stepsManifest.classes, - ...workflowsBundle?.manifest?.classes, - }, - }; - - // Write unified manifest to workflow generated directory - const workflowBundlePath = join(workflowGeneratedDir, 'flow/route.js'); - const manifestJson = await this.createManifest({ - workflowBundlePath, - manifestDir: workflowGeneratedDir, - manifest, - }); - - // Expose manifest as a static file when WORKFLOW_PUBLIC_MANIFEST=1. - // Next.js serves files from public/ at the root URL. 
- if (this.shouldExposePublicManifest && manifestJson) { - const publicManifestDir = join( - this.config.workingDir, - 'public/.well-known/workflow/v1' - ); - await mkdir(publicManifestDir, { recursive: true }); - await copyFile( - join(workflowGeneratedDir, 'manifest.json'), - join(publicManifestDir, 'manifest.json') - ); - } - - await this.writeFunctionsConfig(outputDir); - - if (this.config.watch) { - if (!stepsBuildContext) { - throw new Error( - 'Invariant: expected steps build context in watch mode' - ); - } - if ( - !workflowsBundle?.interimBundleCtx || - !workflowsBundle?.bundleFinal - ) { - throw new Error('Invariant: expected workflows bundle in watch mode'); - } - - let stepsCtx = stepsBuildContext; - // These are safe to assert as non-null because we checked above - let workflowsCtx = { - interimBundleCtx: workflowsBundle.interimBundleCtx!, - bundleFinal: workflowsBundle.bundleFinal!, - }; - - const normalizePath = (pathname: string) => - pathname.replace(/\\/g, '/'); - const knownFiles = new Set(); - type WatchpackTimeInfoEntry = { - safeTime: number; - timestamp?: number; - }; - let previousTimeInfo = new Map(); - - const watchableExtensions = new Set([ - '.js', - '.jsx', - '.ts', - '.tsx', - '.mts', - '.cts', - '.cjs', - '.mjs', - ]); - const ignoredPathFragments = [ - '/.git/', - '/node_modules/', - '/.next/', - '/.turbo/', - '/.vercel/', - '/dist/', - '/build/', - '/out/', - '/.cache/', - '/.yarn/', - '/.pnpm-store/', - '/.parcel-cache/', - '/.well-known/workflow/', - ]; - const normalizedGeneratedDir = workflowGeneratedDir.replace(/\\/g, '/'); - ignoredPathFragments.push(normalizedGeneratedDir); - - // There is a node.js bug on MacOS which causes closing file watchers to be really slow. - // This limits the number of watchers to mitigate the issue. - // https://github.com/nodejs/node/issues/29949 - process.env.WATCHPACK_WATCHER_LIMIT = - process.platform === 'darwin' ? 
'20' : undefined; - - const watcher = new Watchpack({ - // Watchpack default is 200ms which adds 200ms of dead time on bootup. - aggregateTimeout: 5, - ignored: (pathname: string) => { - const normalizedPath = pathname.replace(/\\/g, '/'); - const extension = extname(normalizedPath); - if (extension && !watchableExtensions.has(extension)) { - return true; - } - if (normalizedPath.startsWith(normalizedGeneratedDir)) { - return true; - } - for (const fragment of ignoredPathFragments) { - if (normalizedPath.includes(fragment)) { - return true; - } - } - return false; - }, - }); - - const readTimeInfoEntries = () => { - const rawEntries = watcher.getTimeInfoEntries() as Map< - string, - WatchpackTimeInfoEntry - >; - const normalizedEntries = new Map(); - for (const [path, info] of rawEntries) { - normalizedEntries.set(normalizePath(path), info); - } - return normalizedEntries; - }; - - let rebuildQueue = Promise.resolve(); - - const enqueue = (task: () => Promise) => { - rebuildQueue = rebuildQueue.then(task).catch((error) => { - console.error('Failed to process file change', error); - }); - return rebuildQueue; - }; - - const fullRebuild = async () => { - const newInputFiles = await this.getInputFiles(); - options.inputFiles = newInputFiles; - - await stepsCtx.dispose(); - const { context: newStepsCtx } = - await this.buildStepsFunction(options); - if (!newStepsCtx) { - throw new Error( - 'Invariant: expected steps build context after rebuild' - ); - } - stepsCtx = newStepsCtx; - - await workflowsCtx.interimBundleCtx.dispose(); - const newWorkflowsCtx = await this.buildWorkflowsFunction(options); - if ( - !newWorkflowsCtx?.interimBundleCtx || - !newWorkflowsCtx?.bundleFinal - ) { - throw new Error( - 'Invariant: expected workflows bundle context after rebuild' - ); - } - workflowsCtx = { - interimBundleCtx: newWorkflowsCtx.interimBundleCtx, - bundleFinal: newWorkflowsCtx.bundleFinal, - }; - }; - - const logBuildMessages = ( - result: { - errors?: 
import('esbuild').Message[]; - warnings?: import('esbuild').Message[]; - }, - label: string - ) => { - const logByType = ( - messages: import('esbuild').Message[] | undefined, - method: 'error' | 'warn' - ) => { - if (!messages || messages.length === 0) { - return; - } - const descriptor = method === 'error' ? 'errors' : 'warnings'; - console[method](`${descriptor} while rebuilding ${label}`); - for (const message of messages) { - console[method](message); - } - }; - - logByType(result.errors, 'error'); - logByType(result.warnings, 'warn'); - }; - - const rebuildExistingFiles = async () => { - const rebuiltStepStart = Date.now(); - const stepsResult = await stepsCtx.rebuild(); - logBuildMessages(stepsResult, 'steps bundle'); - console.log( - 'Rebuilt steps bundle', - `${Date.now() - rebuiltStepStart}ms` - ); - - const rebuiltWorkflowStart = Date.now(); - const workflowResult = await workflowsCtx.interimBundleCtx.rebuild(); - logBuildMessages(workflowResult, 'workflows bundle'); - - if ( - !workflowResult.outputFiles || - workflowResult.outputFiles.length === 0 - ) { - console.error( - 'No output generated while rebuilding workflows bundle' - ); - return; - } - await workflowsCtx.bundleFinal(workflowResult.outputFiles[0].text); - console.log( - 'Rebuilt workflow bundle', - `${Date.now() - rebuiltWorkflowStart}ms` - ); - }; - - const isWatchableFile = (path: string) => - watchableExtensions.has(extname(path)); +export const WORKFLOW_DEFERRED_ENTRIES = [ + '/.well-known/workflow/v1/flow', + '/.well-known/workflow/v1/step', + '/.well-known/workflow/v1/webhook/[token]', +] as const; - const getComparableTimestamp = (entry: WatchpackTimeInfoEntry) => - entry.timestamp ?? 
entry.safeTime; - - const findRemovedFiles = ( - currentEntries: Map, - previousEntries: Map - ) => { - const removed: string[] = []; - for (const path of previousEntries.keys()) { - if (!currentEntries.has(path) && isWatchableFile(path)) { - removed.push(path); - } - } - return removed; - }; - - const findAddedAndModifiedFiles = ( - currentEntries: Map, - previousEntries: Map - ) => { - const added: string[] = []; - const modified: string[] = []; - - for (const [path, info] of currentEntries) { - if (!isWatchableFile(path)) { - continue; - } - - const previous = previousEntries.get(path); - if (!previous) { - added.push(path); - continue; - } - - if ( - getComparableTimestamp(info) !== getComparableTimestamp(previous) - ) { - modified.push(path); - } - } - - return { added, modified }; - }; - - const determineFileChanges = ( - currentEntries: Map, - previousEntries: Map - ) => { - const removedFiles = findRemovedFiles( - currentEntries, - previousEntries - ); - const { added, modified } = findAddedAndModifiedFiles( - currentEntries, - previousEntries - ); - - return { - addedFiles: added, - modifiedFiles: modified, - removedFiles, - }; - }; - - let isInitial = true; - - watcher.on('aggregated', () => { - const currentEntries = readTimeInfoEntries(); - const { addedFiles, modifiedFiles, removedFiles } = - determineFileChanges(currentEntries, previousTimeInfo); - - previousTimeInfo = currentEntries; - - if (isInitial) { - isInitial = false; - return; - } - - if ( - addedFiles.length === 0 && - modifiedFiles.length === 0 && - removedFiles.length === 0 - ) { - return; - } - - for (const removal of removedFiles) { - knownFiles.delete(removal); - } - for (const added of addedFiles) { - knownFiles.add(added); - } - - enqueue(async () => { - if (addedFiles.length > 0 || removedFiles.length > 0) { - await fullRebuild(); - return; - } - - if (modifiedFiles.length > 0) { - await rebuildExistingFiles(); - } - }); - }); - - watcher.watch({ - directories: 
[this.config.workingDir], - startTime: 0, - }); - } - } - - protected async getInputFiles(): Promise { - const inputFiles = await super.getInputFiles(); - return inputFiles.filter((item) => { - // Match App Router entrypoints: route.ts, page.ts, layout.ts in app/ or src/app/ directories - // Matches: /app/page.ts, /app/dashboard/page.ts, /src/app/route.ts, etc. - if ( - item.match( - /(^|.*[/\\])(app|src[/\\]app)([/\\](route|page|layout)\.|[/\\].*[/\\](route|page|layout)\.)/ - ) - ) { - return true; - } - // Match Pages Router entrypoints: files in pages/ or src/pages/ - if (item.match(/[/\\](pages|src[/\\]pages)[/\\]/)) { - return true; - } - return false; - }); - } - - private async writeFunctionsConfig(outputDir: string) { - // we don't run this in development mode as it's not needed - if (process.env.NODE_ENV === 'development') { - return; - } - const generatedConfig = { - version: '0', - steps: { - experimentalTriggers: [STEP_QUEUE_TRIGGER], - }, - workflows: { - experimentalTriggers: [WORKFLOW_QUEUE_TRIGGER], - }, - }; - - // We write this file to the generated directory for - // the Next.js builder to consume - await writeFile( - join(outputDir, '.well-known/workflow/v1/config.json'), - JSON.stringify(generatedConfig, null, 2) - ); - } - - private async buildStepsFunction({ - inputFiles, - workflowGeneratedDir, - tsconfigPath, - }: { - inputFiles: string[]; - workflowGeneratedDir: string; - tsconfigPath?: string; - }) { - // Create steps bundle - const stepsRouteDir = join(workflowGeneratedDir, 'step'); - await mkdir(stepsRouteDir, { recursive: true }); - return await this.createStepsBundle({ - // If any dynamic requires are used when bundling with ESM - // esbuild will create a too dynamic wrapper around require - // which turbopack/webpack fail to analyze. 
If we externalize - // correctly this shouldn't be an issue although we might want - // to use cjs as alternative to avoid - format: 'esm', - inputFiles, - outfile: join(stepsRouteDir, 'route.js'), - externalizeNonSteps: true, - tsconfigPath, - }); - } - - private async buildWorkflowsFunction({ - inputFiles, - workflowGeneratedDir, - tsconfigPath, - }: { - inputFiles: string[]; - workflowGeneratedDir: string; - tsconfigPath?: string; - }) { - const workflowsRouteDir = join(workflowGeneratedDir, 'flow'); - await mkdir(workflowsRouteDir, { recursive: true }); - return await this.createWorkflowsBundle({ - format: 'esm', - outfile: join(workflowsRouteDir, 'route.js'), - bundleFinalOutput: false, - inputFiles, - tsconfigPath, - }); - } - - private async buildWebhookRoute({ - workflowGeneratedDir, - }: { - workflowGeneratedDir: string; - }): Promise { - const webhookRouteFile = join( - workflowGeneratedDir, - 'webhook/[token]/route.js' - ); - await this.createWebhookBundle({ - outfile: webhookRouteFile, - bundle: false, // Next.js doesn't need bundling - }); - } - - private async findAppDirectory(): Promise { - const appDir = resolve(this.config.workingDir, 'app'); - const srcAppDir = resolve(this.config.workingDir, 'src/app'); - const pagesDir = resolve(this.config.workingDir, 'pages'); - const srcPagesDir = resolve(this.config.workingDir, 'src/pages'); - - // Helper to check if a path exists and is a directory - const isDirectory = async (path: string): Promise => { - try { - await access(path, constants.F_OK); - const stats = await stat(path); - if (!stats.isDirectory()) { - throw new Error(`Path exists but is not a directory: ${path}`); - } - return true; - } catch (e) { - if (e instanceof Error && e.message.includes('not a directory')) { - throw e; - } - return false; - } - }; - - // Check if app directory exists - if (await isDirectory(appDir)) { - return appDir; - } - - // Check if src/app directory exists - if (await isDirectory(srcAppDir)) { - return srcAppDir; 
- } - - // If no app directory exists, check for pages directory and create app next to it - if (await isDirectory(pagesDir)) { - // Create app directory next to pages directory - await mkdir(appDir, { recursive: true }); - return appDir; - } - - if (await isDirectory(srcPagesDir)) { - // Create src/app directory next to src/pages directory - await mkdir(srcAppDir, { recursive: true }); - return srcAppDir; - } +export function shouldUseDeferredBuilder(nextVersion: string): boolean { + return semver.gte(nextVersion, DEFERRED_BUILDER_MIN_VERSION); +} - throw new Error( - 'Could not find Next.js app or pages directory. Expected one of: "app", "src/app", "pages", or "src/pages" to exist.' - ); - } +export async function getNextBuilder(nextVersion: string) { + if (shouldUseDeferredBuilder(nextVersion)) { + return getNextBuilderDeferred(); } - CachedNextBuilder = NextBuilder; - return NextBuilder; + return getNextBuilderEager(); } diff --git a/packages/next/src/index.ts b/packages/next/src/index.ts index 1d96e05615..37f4ef382b 100644 --- a/packages/next/src/index.ts +++ b/packages/next/src/index.ts @@ -1,7 +1,11 @@ import type { NextConfig } from 'next'; import path from 'path'; import semver from 'semver'; -import { getNextBuilder } from './builder.js'; +import { + getNextBuilder, + shouldUseDeferredBuilder, + WORKFLOW_DEFERRED_ENTRIES, +} from './builder.js'; import { maybeInvalidateCacheOnSwcChange } from './swc-cache.js'; export function withWorkflow( @@ -42,6 +46,7 @@ export function withWorkflow( ctx: { defaultConfig: NextConfig } ) { const loaderPath = require.resolve('./loader'); + let runDeferredBuildFromCallback: (() => Promise) | undefined; let nextConfig: NextConfig; @@ -63,6 +68,87 @@ export function withWorkflow( const existingRules = nextConfig.turbopack.rules as any; const nextVersion = require('next/package.json').version; const supportsTurboCondition = semver.gte(nextVersion, 'v16.0.0'); + const useDeferredBuilder = 
shouldUseDeferredBuilder(nextVersion); + // Deferred builder discovers files via loader socket notifications, so + // turbopack content conditions are only needed with the eager builder. + const shouldApplyTurboCondition = + supportsTurboCondition && !useDeferredBuilder; + const shouldWatch = process.env.NODE_ENV === 'development'; + let workflowBuilderPromise: Promise | undefined; + + const getWorkflowBuilder = async () => { + if (!workflowBuilderPromise) { + workflowBuilderPromise = (async () => { + const NextBuilder = await getNextBuilder(nextVersion); + return new NextBuilder({ + watch: shouldWatch, + // discover workflows from pages/app entries + dirs: ['pages', 'app', 'src/pages', 'src/app'], + workingDir: process.cwd(), + distDir: nextConfig.distDir || '.next', + buildTarget: 'next', + workflowsBundlePath: '', // not used in base + stepsBundlePath: '', // not used in base + webhookBundlePath: '', // node used in base + externalPackages: [ + // server-only and client-only are pseudo-packages handled by Next.js + // during its build process. We mark them as external to prevent esbuild + // from failing when bundling code that imports them. + // See: https://nextjs.org/docs/app/getting-started/server-and-client-components + 'server-only', + 'client-only', + ...(nextConfig.serverExternalPackages || []), + ], + }); + })(); + } + + return workflowBuilderPromise; + }; + + if (useDeferredBuilder) { + runDeferredBuildFromCallback = async () => { + const workflowBuilder = await getWorkflowBuilder(); + if (typeof workflowBuilder.onBeforeDeferredEntries === 'function') { + await workflowBuilder.onBeforeDeferredEntries(); + } + }; + + const existingExperimental = (nextConfig.experimental ?? {}) as Record< + string, + any + >; + const existingDeferredEntries = Array.isArray( + existingExperimental.deferredEntries + ) + ? 
existingExperimental.deferredEntries + : []; + const existingOnBeforeDeferredEntries = + typeof existingExperimental.onBeforeDeferredEntries === 'function' + ? existingExperimental.onBeforeDeferredEntries + : undefined; + + nextConfig.experimental = { + ...existingExperimental, + + // biome-ignore lint/suspicious/noTsIgnore: expect-error is wrong as it will work on valid version + // @ts-ignore this is only available in canary Next.js + deferredEntries: [ + ...new Set([ + ...existingDeferredEntries, + ...WORKFLOW_DEFERRED_ENTRIES, + ]), + ], + onBeforeDeferredEntries: async (...args: unknown[]) => { + if (existingOnBeforeDeferredEntries) { + await existingOnBeforeDeferredEntries(...args); + } + if (runDeferredBuildFromCallback) { + await runDeferredBuildFromCallback(); + } + }, + }; + } for (const key of [ '*.tsx', @@ -75,7 +161,7 @@ export function withWorkflow( '*.cts', ]) { nextConfig.turbopack.rules[key] = { - ...(supportsTurboCondition + ...(shouldApplyTurboCondition ? { condition: { // Use 'all' to combine: must match content AND must NOT be in generated path @@ -131,28 +217,7 @@ export function withWorkflow( nextConfig.distDir || '.next' ); maybeInvalidateCacheOnSwcChange(distDir); - - const shouldWatch = process.env.NODE_ENV === 'development'; - const NextBuilder = await getNextBuilder(); - const workflowBuilder = new NextBuilder({ - watch: shouldWatch, - // discover workflows from pages/app entries - dirs: ['pages', 'app', 'src/pages', 'src/app'], - workingDir: process.cwd(), - buildTarget: 'next', - workflowsBundlePath: '', // not used in base - stepsBundlePath: '', // not used in base - webhookBundlePath: '', // node used in base - externalPackages: [ - // server-only and client-only are pseudo-packages handled by Next.js - // during its build process. We mark them as external to prevent esbuild - // from failing when bundling code that imports them. 
- // See: https://nextjs.org/docs/app/getting-started/server-and-client-components - 'server-only', - 'client-only', - ...(nextConfig.serverExternalPackages || []), - ], - }); + const workflowBuilder = await getWorkflowBuilder(); await workflowBuilder.build(); process.env.WORKFLOW_NEXT_PRIVATE_BUILT = '1'; diff --git a/packages/next/src/loader.ts b/packages/next/src/loader.ts index fd10db13cd..6b4ffe9fe7 100644 --- a/packages/next/src/loader.ts +++ b/packages/next/src/loader.ts @@ -1,15 +1,166 @@ +import { connect, type Socket } from 'node:net'; import { relative } from 'node:path'; import { transform } from '@swc/core'; +import { type SocketMessage, serializeMessage } from './socket-server.js'; type DecoratorOptions = import('@workflow/builders').DecoratorOptions; type WorkflowPatternMatch = import('@workflow/builders').WorkflowPatternMatch; // Cache decorator options per working directory to avoid reading tsconfig for every file const decoratorOptionsCache = new Map>(); - // Cache for shared utilities from @workflow/builders (ESM module loaded dynamically in CommonJS context) let cachedBuildersModule: typeof import('@workflow/builders') | null = null; +// Cache socket connection to avoid reconnecting on every file. 
+let socketClientPromise: Promise | null = null; +let socketClient: Socket | null = null; + +function resetSocketClient(cachedSocket?: Socket): void { + if (cachedSocket && socketClient && socketClient !== cachedSocket) { + return; + } + + socketClientPromise = null; + socketClient = null; +} + +async function writeSocketMessage( + socket: Socket, + message: string +): Promise { + await new Promise((resolve, reject) => { + socket.write(message, (error?: Error | null) => { + if (error) { + reject(error); + return; + } + resolve(); + }); + }); +} + +function shouldUseSocketDiscovery(): boolean { + return Boolean( + process.env.WORKFLOW_SOCKET_PORT && process.env.WORKFLOW_SOCKET_AUTH + ); +} + +async function getSocketClient(): Promise { + if (!shouldUseSocketDiscovery()) { + return null; + } + + if (socketClient?.destroyed) { + resetSocketClient(socketClient); + } + + if (!socketClientPromise) { + socketClientPromise = (async () => { + try { + const socketPort = process.env.WORKFLOW_SOCKET_PORT; + if (!socketPort) { + throw new Error( + 'Invariant: no socket port provided for workflow loader' + ); + } + + const port = Number.parseInt(socketPort, 10); + if (Number.isNaN(port)) { + throw new Error( + `Invariant: invalid socket port provided: ${socketPort}` + ); + } + + const socket = connect({ port, host: '127.0.0.1' }); + + // Wait for connection + await new Promise((resolve, reject) => { + const onConnect = () => { + socket.setNoDelay(true); + cleanup(); + resolve(); + }; + const onError = (error: Error) => { + cleanup(); + reject(error); + }; + const timeout = setTimeout(() => { + cleanup(); + socket.destroy(); + reject(new Error('Socket connection timeout')); + }, 1000); + const cleanup = () => { + clearTimeout(timeout); + socket.off('connect', onConnect); + socket.off('error', onError); + }; + + socket.on('connect', onConnect); + socket.on('error', onError); + }); + + socket.on('close', () => { + resetSocketClient(socket); + }); + socket.on('error', () => { + 
resetSocketClient(socket); + }); + + socketClient = socket; + return socket; + } catch (error) { + resetSocketClient(); + throw error; + } + })(); + } + + return socketClientPromise; +} + +async function notifySocketServer( + filename: string, + hasWorkflow: boolean, + hasStep: boolean, + hasSerde: boolean +): Promise { + if (!shouldUseSocketDiscovery()) { + return; + } + + const socket = await getSocketClient(); + if (!socket) { + throw new Error('Invariant: missing workflow socket connection'); + } + + const authToken = process.env.WORKFLOW_SOCKET_AUTH; + if (!authToken) { + throw new Error( + 'Invariant: no socket auth token provided for workflow loader' + ); + } + + const message: SocketMessage = { + type: 'file-discovered', + filePath: filename, + hasWorkflow, + hasStep, + hasSerde, + }; + const serializedMessage = serializeMessage(message, authToken); + + try { + await writeSocketMessage(socket, serializedMessage); + } catch (error) { + resetSocketClient(socket); + const reconnectedSocket = await getSocketClient(); + if (!reconnectedSocket) { + throw error; + } + await writeSocketMessage(reconnectedSocket, serializedMessage); + } +} + async function getBuildersModule(): Promise< typeof import('@workflow/builders') > { @@ -72,6 +223,14 @@ async function getModuleSpecifier( return resolveModuleSpecifier(filePath, projectRoot).moduleSpecifier; } +async function resolveWorkflowAliasPath( + filePath: string, + workingDir: string +): Promise { + const { resolveWorkflowAliasRelativePath } = await getBuildersModule(); + return resolveWorkflowAliasRelativePath(filePath, workingDir); +} + // This loader applies the "use workflow"/"use step" // client transformation export default async function workflowLoader( @@ -91,6 +250,14 @@ export default async function workflowLoader( // Detect workflow patterns in the source code const patterns = await detectPatterns(normalizedSource); + // Always notify discovery tracking, even for `false/false`, so files that + // previously 
had workflow/step usage are removed from the tracked sets. + await notifySocketServer( + filename, + patterns.hasUseWorkflow, + patterns.hasUseStep, + patterns.hasSerde + ); // For @workflow SDK packages, only transform files with actual directives, // not files that just match serde patterns (which are internal SDK implementation files) @@ -136,10 +303,18 @@ export default async function workflowLoader( relativeFilename = relative(workingDir, filename).replace(/\\/g, '/'); if (relativeFilename.startsWith('../')) { - relativeFilename = relativeFilename - .split('/') - .filter((part) => part !== '..') - .join('/'); + const aliasedRelativePath = await resolveWorkflowAliasPath( + filename, + workingDir + ); + if (aliasedRelativePath) { + relativeFilename = aliasedRelativePath; + } else { + relativeFilename = relativeFilename + .split('/') + .filter((part) => part !== '..') + .join('/'); + } } } diff --git a/packages/next/src/socket-server.ts b/packages/next/src/socket-server.ts new file mode 100644 index 0000000000..777a2619c5 --- /dev/null +++ b/packages/next/src/socket-server.ts @@ -0,0 +1,194 @@ +import { randomBytes } from 'node:crypto'; +import { createServer, type Server, type Socket } from 'node:net'; + +/** + * Magic preamble that must prefix all messages to authenticate them as workflow messages. + * This prevents accidental processing of messages from port scanners or other local processes. + */ +const MESSAGE_PREAMBLE = 'WF:'; + +/** + * Generate a random authentication token for this server session. + * Clients must include this token in all messages. 
+ */ +function generateAuthToken(): string { + return randomBytes(16).toString('hex'); +} + +/** + * Message types that can be sent between loader and builder + */ +export type SocketMessage = + | { + type: 'file-discovered'; + filePath: string; + hasWorkflow: boolean; + hasStep: boolean; + hasSerde: boolean; + } + | { type: 'trigger-build' } + | { type: 'build-complete' }; + +/** + * Configuration for the socket server + */ +export interface SocketServerConfig { + isDevServer: boolean; + onFileDiscovered: ( + filePath: string, + hasWorkflow: boolean, + hasStep: boolean, + hasSerde: boolean + ) => void; + onTriggerBuild: () => void; +} + +/** + * Interface for the socket IO instance returned by createSocketServer + */ +export interface SocketIO { + emit(event: 'build-complete'): void; + getAuthToken(): string; +} + +/** + * Serialize a message with authentication preamble + */ +export function serializeMessage( + message: SocketMessage, + authToken: string +): string { + return `${MESSAGE_PREAMBLE}${authToken}:${JSON.stringify(message)}\n`; +} + +/** + * Parse and authenticate a message from the socket + * Returns the parsed message if valid, null otherwise + */ +export function parseMessage( + line: string, + authToken: string +): SocketMessage | null { + const trimmed = line.trim(); + if (!trimmed) { + return null; + } + + // Check for preamble + if (!trimmed.startsWith(MESSAGE_PREAMBLE)) { + console.warn('Received message without valid preamble, ignoring'); + return null; + } + + // Extract auth token and payload + const withoutPreamble = trimmed.slice(MESSAGE_PREAMBLE.length); + const colonIndex = withoutPreamble.indexOf(':'); + if (colonIndex === -1) { + console.warn('Received message without auth token separator, ignoring'); + return null; + } + + const messageToken = withoutPreamble.slice(0, colonIndex); + const payload = withoutPreamble.slice(colonIndex + 1); + + // Verify auth token + if (messageToken !== authToken) { + console.warn('Received message with 
invalid auth token, ignoring'); + return null; + } + + // Parse JSON payload + try { + return JSON.parse(payload) as SocketMessage; + } catch (error) { + console.error('Failed to parse socket message JSON:', error); + return null; + } +} + +/** + * Create a TCP socket server for loader<->builder communication. + * Returns a SocketIO interface for broadcasting messages and the auth token. + * + * SECURITY: Server listens on 127.0.0.1 (localhost only) and uses + * message authentication to prevent processing of unauthorized messages. + */ +export async function createSocketServer( + config: SocketServerConfig +): Promise { + const authToken = generateAuthToken(); + const clients = new Set(); + let buildTriggered = false; + + const server: Server = createServer((socket: Socket) => { + socket.setNoDelay(true); + clients.add(socket); + + // Send build-complete if build already finished (production mode) + if (buildTriggered && !config.isDevServer) { + socket.write(serializeMessage({ type: 'build-complete' }, authToken)); + } + + let buffer = ''; + + socket.on('data', (data: Buffer) => { + buffer += data.toString(); + + // Process complete messages (newline-delimited) + let newlineIndex = buffer.indexOf('\n'); + while (newlineIndex !== -1) { + const line = buffer.slice(0, newlineIndex); + buffer = buffer.slice(newlineIndex + 1); + newlineIndex = buffer.indexOf('\n'); + + const message = parseMessage(line, authToken); + if (!message) { + continue; + } + + if (message.type === 'file-discovered') { + config.onFileDiscovered( + message.filePath, + message.hasWorkflow, + message.hasStep, + message.hasSerde + ); + } else if (message.type === 'trigger-build') { + config.onTriggerBuild(); + } + } + }); + + socket.on('end', () => { + clients.delete(socket); + }); + + socket.on('error', (err: Error) => { + console.error('Socket error:', err); + clients.delete(socket); + }); + }); + + // Listen on random available port (localhost only) + await new Promise((resolve) => { + 
server.listen(0, '127.0.0.1', () => { + const address = server.address(); + if (address && typeof address === 'object') { + process.env.WORKFLOW_SOCKET_PORT = String(address.port); + process.env.WORKFLOW_SOCKET_AUTH = authToken; + } + resolve(); + }); + }); + + return { + emit: (_event: 'build-complete') => { + buildTriggered = true; + const message = serializeMessage({ type: 'build-complete' }, authToken); + for (const client of clients) { + client.write(message); + } + }, + getAuthToken: () => authToken, + }; +} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index cf5b0bf5f7..0f5cd3f4fc 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -2817,6 +2817,9 @@ packages: '@emnapi/runtime@1.5.0': resolution: {integrity: sha512-97/BJ3iXHww3djw6hYIfErCZFee7qCtrneuLa20UXFCOTCfBM2cvQHjWJ2EG0s0MtdNwInarqCTz35i4wWXHsQ==} + '@emnapi/runtime@1.8.1': + resolution: {integrity: sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg==} + '@emnapi/wasi-threads@1.1.0': resolution: {integrity: sha512-WI0DdZ8xFSbgMjR1sFsKABJ/C5OnRrjT06JXbZKexJGrDuPTzZdDYfFlsgcCXCyf+suG5QU2e/y1Wo2V/OapLQ==} @@ -3573,122 +3576,255 @@ packages: cpu: [arm64] os: [darwin] + '@img/sharp-darwin-arm64@0.34.5': + resolution: {integrity: sha512-imtQ3WMJXbMY4fxb/Ndp6HBTNVtWCUI0WdobyheGf5+ad6xX8VIDO8u2xE4qc/fr08CKG/7dDseFtn6M6g/r3w==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [arm64] + os: [darwin] + '@img/sharp-darwin-x64@0.34.4': resolution: {integrity: sha512-rZheupWIoa3+SOdF/IcUe1ah4ZDpKBGWcsPX6MT0lYniH9micvIU7HQkYTfrx5Xi8u+YqwLtxC/3vl8TQN6rMg==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [x64] os: [darwin] + '@img/sharp-darwin-x64@0.34.5': + resolution: {integrity: sha512-YNEFAF/4KQ/PeW0N+r+aVVsoIY0/qxxikF2SWdp+NRkmMB7y9LBZAVqQ4yhGCm/H3H270OSykqmQMKLBhBJDEw==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [x64] + os: [darwin] + '@img/sharp-libvips-darwin-arm64@1.2.3': resolution: {integrity: 
sha512-QzWAKo7kpHxbuHqUC28DZ9pIKpSi2ts2OJnoIGI26+HMgq92ZZ4vk8iJd4XsxN+tYfNJxzH6W62X5eTcsBymHw==} cpu: [arm64] os: [darwin] + '@img/sharp-libvips-darwin-arm64@1.2.4': + resolution: {integrity: sha512-zqjjo7RatFfFoP0MkQ51jfuFZBnVE2pRiaydKJ1G/rHZvnsrHAOcQALIi9sA5co5xenQdTugCvtb1cuf78Vf4g==} + cpu: [arm64] + os: [darwin] + '@img/sharp-libvips-darwin-x64@1.2.3': resolution: {integrity: sha512-Ju+g2xn1E2AKO6YBhxjj+ACcsPQRHT0bhpglxcEf+3uyPY+/gL8veniKoo96335ZaPo03bdDXMv0t+BBFAbmRA==} cpu: [x64] os: [darwin] + '@img/sharp-libvips-darwin-x64@1.2.4': + resolution: {integrity: sha512-1IOd5xfVhlGwX+zXv2N93k0yMONvUlANylbJw1eTah8K/Jtpi15KC+WSiaX/nBmbm2HxRM1gZ0nSdjSsrZbGKg==} + cpu: [x64] + os: [darwin] + '@img/sharp-libvips-linux-arm64@1.2.3': resolution: {integrity: sha512-I4RxkXU90cpufazhGPyVujYwfIm9Nk1QDEmiIsaPwdnm013F7RIceaCc87kAH+oUB1ezqEvC6ga4m7MSlqsJvQ==} cpu: [arm64] os: [linux] + '@img/sharp-libvips-linux-arm64@1.2.4': + resolution: {integrity: sha512-excjX8DfsIcJ10x1Kzr4RcWe1edC9PquDRRPx3YVCvQv+U5p7Yin2s32ftzikXojb1PIFc/9Mt28/y+iRklkrw==} + cpu: [arm64] + os: [linux] + '@img/sharp-libvips-linux-arm@1.2.3': resolution: {integrity: sha512-x1uE93lyP6wEwGvgAIV0gP6zmaL/a0tGzJs/BIDDG0zeBhMnuUPm7ptxGhUbcGs4okDJrk4nxgrmxpib9g6HpA==} cpu: [arm] os: [linux] + '@img/sharp-libvips-linux-arm@1.2.4': + resolution: {integrity: sha512-bFI7xcKFELdiNCVov8e44Ia4u2byA+l3XtsAj+Q8tfCwO6BQ8iDojYdvoPMqsKDkuoOo+X6HZA0s0q11ANMQ8A==} + cpu: [arm] + os: [linux] + '@img/sharp-libvips-linux-ppc64@1.2.3': resolution: {integrity: sha512-Y2T7IsQvJLMCBM+pmPbM3bKT/yYJvVtLJGfCs4Sp95SjvnFIjynbjzsa7dY1fRJX45FTSfDksbTp6AGWudiyCg==} cpu: [ppc64] os: [linux] + '@img/sharp-libvips-linux-ppc64@1.2.4': + resolution: {integrity: sha512-FMuvGijLDYG6lW+b/UvyilUWu5Ayu+3r2d1S8notiGCIyYU/76eig1UfMmkZ7vwgOrzKzlQbFSuQfgm7GYUPpA==} + cpu: [ppc64] + os: [linux] + + '@img/sharp-libvips-linux-riscv64@1.2.4': + resolution: {integrity: 
sha512-oVDbcR4zUC0ce82teubSm+x6ETixtKZBh/qbREIOcI3cULzDyb18Sr/Wcyx7NRQeQzOiHTNbZFF1UwPS2scyGA==} + cpu: [riscv64] + os: [linux] + '@img/sharp-libvips-linux-s390x@1.2.3': resolution: {integrity: sha512-RgWrs/gVU7f+K7P+KeHFaBAJlNkD1nIZuVXdQv6S+fNA6syCcoboNjsV2Pou7zNlVdNQoQUpQTk8SWDHUA3y/w==} cpu: [s390x] os: [linux] + '@img/sharp-libvips-linux-s390x@1.2.4': + resolution: {integrity: sha512-qmp9VrzgPgMoGZyPvrQHqk02uyjA0/QrTO26Tqk6l4ZV0MPWIW6LTkqOIov+J1yEu7MbFQaDpwdwJKhbJvuRxQ==} + cpu: [s390x] + os: [linux] + '@img/sharp-libvips-linux-x64@1.2.3': resolution: {integrity: sha512-3JU7LmR85K6bBiRzSUc/Ff9JBVIFVvq6bomKE0e63UXGeRw2HPVEjoJke1Yx+iU4rL7/7kUjES4dZ/81Qjhyxg==} cpu: [x64] os: [linux] + '@img/sharp-libvips-linux-x64@1.2.4': + resolution: {integrity: sha512-tJxiiLsmHc9Ax1bz3oaOYBURTXGIRDODBqhveVHonrHJ9/+k89qbLl0bcJns+e4t4rvaNBxaEZsFtSfAdquPrw==} + cpu: [x64] + os: [linux] + '@img/sharp-libvips-linuxmusl-arm64@1.2.3': resolution: {integrity: sha512-F9q83RZ8yaCwENw1GieztSfj5msz7GGykG/BA+MOUefvER69K/ubgFHNeSyUu64amHIYKGDs4sRCMzXVj8sEyw==} cpu: [arm64] os: [linux] + '@img/sharp-libvips-linuxmusl-arm64@1.2.4': + resolution: {integrity: sha512-FVQHuwx1IIuNow9QAbYUzJ+En8KcVm9Lk5+uGUQJHaZmMECZmOlix9HnH7n1TRkXMS0pGxIJokIVB9SuqZGGXw==} + cpu: [arm64] + os: [linux] + '@img/sharp-libvips-linuxmusl-x64@1.2.3': resolution: {integrity: sha512-U5PUY5jbc45ANM6tSJpsgqmBF/VsL6LnxJmIf11kB7J5DctHgqm0SkuXzVWtIY90GnJxKnC/JT251TDnk1fu/g==} cpu: [x64] os: [linux] + '@img/sharp-libvips-linuxmusl-x64@1.2.4': + resolution: {integrity: sha512-+LpyBk7L44ZIXwz/VYfglaX/okxezESc6UxDSoyo2Ks6Jxc4Y7sGjpgU9s4PMgqgjj1gZCylTieNamqA1MF7Dg==} + cpu: [x64] + os: [linux] + '@img/sharp-linux-arm64@0.34.4': resolution: {integrity: sha512-YXU1F/mN/Wu786tl72CyJjP/Ngl8mGHN1hST4BGl+hiW5jhCnV2uRVTNOcaYPs73NeT/H8Upm3y9582JVuZHrQ==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [arm64] os: [linux] + '@img/sharp-linux-arm64@0.34.5': + resolution: {integrity: 
sha512-bKQzaJRY/bkPOXyKx5EVup7qkaojECG6NLYswgktOZjaXecSAeCWiZwwiFf3/Y+O1HrauiE3FVsGxFg8c24rZg==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [arm64] + os: [linux] + '@img/sharp-linux-arm@0.34.4': resolution: {integrity: sha512-Xyam4mlqM0KkTHYVSuc6wXRmM7LGN0P12li03jAnZ3EJWZqj83+hi8Y9UxZUbxsgsK1qOEwg7O0Bc0LjqQVtxA==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [arm] os: [linux] + '@img/sharp-linux-arm@0.34.5': + resolution: {integrity: sha512-9dLqsvwtg1uuXBGZKsxem9595+ujv0sJ6Vi8wcTANSFpwV/GONat5eCkzQo/1O6zRIkh0m/8+5BjrRr7jDUSZw==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [arm] + os: [linux] + '@img/sharp-linux-ppc64@0.34.4': resolution: {integrity: sha512-F4PDtF4Cy8L8hXA2p3TO6s4aDt93v+LKmpcYFLAVdkkD3hSxZzee0rh6/+94FpAynsuMpLX5h+LRsSG3rIciUQ==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [ppc64] os: [linux] + '@img/sharp-linux-ppc64@0.34.5': + resolution: {integrity: sha512-7zznwNaqW6YtsfrGGDA6BRkISKAAE1Jo0QdpNYXNMHu2+0dTrPflTLNkpc8l7MUP5M16ZJcUvysVWWrMefZquA==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [ppc64] + os: [linux] + + '@img/sharp-linux-riscv64@0.34.5': + resolution: {integrity: sha512-51gJuLPTKa7piYPaVs8GmByo7/U7/7TZOq+cnXJIHZKavIRHAP77e3N2HEl3dgiqdD/w0yUfiJnII77PuDDFdw==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [riscv64] + os: [linux] + '@img/sharp-linux-s390x@0.34.4': resolution: {integrity: sha512-qVrZKE9Bsnzy+myf7lFKvng6bQzhNUAYcVORq2P7bDlvmF6u2sCmK2KyEQEBdYk+u3T01pVsPrkj943T1aJAsw==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [s390x] os: [linux] + '@img/sharp-linux-s390x@0.34.5': + resolution: {integrity: sha512-nQtCk0PdKfho3eC5MrbQoigJ2gd1CgddUMkabUj+rBevs8tZ2cULOx46E7oyX+04WGfABgIwmMC0VqieTiR4jg==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [s390x] + os: [linux] + '@img/sharp-linux-x64@0.34.4': resolution: {integrity: sha512-ZfGtcp2xS51iG79c6Vhw9CWqQC8l2Ot8dygxoDoIQPTat/Ov3qAa8qpxSrtAEAJW+UjTXc4yxCjNfxm4h6Xm2A==} engines: {node: ^18.17.0 || 
^20.3.0 || >=21.0.0} cpu: [x64] os: [linux] + '@img/sharp-linux-x64@0.34.5': + resolution: {integrity: sha512-MEzd8HPKxVxVenwAa+JRPwEC7QFjoPWuS5NZnBt6B3pu7EG2Ge0id1oLHZpPJdn3OQK+BQDiw9zStiHBTJQQQQ==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [x64] + os: [linux] + '@img/sharp-linuxmusl-arm64@0.34.4': resolution: {integrity: sha512-8hDVvW9eu4yHWnjaOOR8kHVrew1iIX+MUgwxSuH2XyYeNRtLUe4VNioSqbNkB7ZYQJj9rUTT4PyRscyk2PXFKA==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [arm64] os: [linux] + '@img/sharp-linuxmusl-arm64@0.34.5': + resolution: {integrity: sha512-fprJR6GtRsMt6Kyfq44IsChVZeGN97gTD331weR1ex1c1rypDEABN6Tm2xa1wE6lYb5DdEnk03NZPqA7Id21yg==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [arm64] + os: [linux] + '@img/sharp-linuxmusl-x64@0.34.4': resolution: {integrity: sha512-lU0aA5L8QTlfKjpDCEFOZsTYGn3AEiO6db8W5aQDxj0nQkVrZWmN3ZP9sYKWJdtq3PWPhUNlqehWyXpYDcI9Sg==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [x64] os: [linux] + '@img/sharp-linuxmusl-x64@0.34.5': + resolution: {integrity: sha512-Jg8wNT1MUzIvhBFxViqrEhWDGzqymo3sV7z7ZsaWbZNDLXRJZoRGrjulp60YYtV4wfY8VIKcWidjojlLcWrd8Q==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [x64] + os: [linux] + '@img/sharp-wasm32@0.34.4': resolution: {integrity: sha512-33QL6ZO/qpRyG7woB/HUALz28WnTMI2W1jgX3Nu2bypqLIKx/QKMILLJzJjI+SIbvXdG9fUnmrxR7vbi1sTBeA==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [wasm32] + '@img/sharp-wasm32@0.34.5': + resolution: {integrity: sha512-OdWTEiVkY2PHwqkbBI8frFxQQFekHaSSkUIJkwzclWZe64O1X4UlUjqqqLaPbUpMOQk6FBu/HtlGXNblIs0huw==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [wasm32] + '@img/sharp-win32-arm64@0.34.4': resolution: {integrity: sha512-2Q250do/5WXTwxW3zjsEuMSv5sUU4Tq9VThWKlU2EYLm4MB7ZeMwF+SFJutldYODXF6jzc6YEOC+VfX0SZQPqA==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [arm64] os: [win32] + '@img/sharp-win32-arm64@0.34.5': + resolution: {integrity: 
sha512-WQ3AgWCWYSb2yt+IG8mnC6Jdk9Whs7O0gxphblsLvdhSpSTtmu69ZG1Gkb6NuvxsNACwiPV6cNSZNzt0KPsw7g==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [arm64] + os: [win32] + '@img/sharp-win32-ia32@0.34.4': resolution: {integrity: sha512-3ZeLue5V82dT92CNL6rsal6I2weKw1cYu+rGKm8fOCCtJTR2gYeUfY3FqUnIJsMUPIH68oS5jmZ0NiJ508YpEw==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [ia32] os: [win32] + '@img/sharp-win32-ia32@0.34.5': + resolution: {integrity: sha512-FV9m/7NmeCmSHDD5j4+4pNI8Cp3aW+JvLoXcTUo0IqyjSfAZJ8dIUmijx1qaJsIiU+Hosw6xM5KijAWRJCSgNg==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [ia32] + os: [win32] + '@img/sharp-win32-x64@0.34.4': resolution: {integrity: sha512-xIyj4wpYs8J18sVN3mSQjwrw7fKUqRw+Z5rnHNCy5fYTxigBz81u5mOMPmFumwjcn8+ld1ppptMBCLic1nz6ig==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} cpu: [x64] os: [win32] + '@img/sharp-win32-x64@0.34.5': + resolution: {integrity: sha512-+29YMsqY2/9eFEiW93eqWnuLcWcufowXewwSNIT6UwZdUUCrM3oFjMWH/Z6/TMmb4hlFenmfAVbpWeup2jryCw==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + cpu: [x64] + os: [win32] + '@inquirer/ansi@1.0.2': resolution: {integrity: sha512-S8qNSZiYzFd0wAcyG5AXCvUHC5Sr7xpZ9wZ2py9XR88jUz8wooStVx5M6dRzczbBWjic9NP7+rY0Xi7qqK/aMQ==} engines: {node: '>=18'} @@ -8617,6 +8753,10 @@ packages: resolution: {integrity: sha512-e23vBV1ZLfjb9apvfPk4rHVu2ry6RIr2Wfs+O324okSidrX7pTAnEJPCh/O5BtRlr7QtZI7ktOP3vsqr7Z5XoA==} hasBin: true + baseline-browser-mapping@2.9.19: + resolution: {integrity: sha512-ipDqC8FrAl/76p2SSWKSI+H9tFwm7vYqXQrItCuiVPt26Km0jS+NzSsBWAaBusvSbQcfJG+JitdMm+wZAgTYqg==} + hasBin: true + bcp-47-match@2.0.3: resolution: {integrity: sha512-JtTezzbAibu8G0R9op9zb3vcWZd9JF6M0xOYGPn0fNCd7wOpRB1mU2mH9T8gaBGbAAyIIVgB2G7xG0GP98zMAQ==} @@ -10574,11 +10714,13 @@ packages: glob@10.5.0: resolution: {integrity: sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg==} + deprecated: Old versions of glob are not supported, and contain widely 
publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me hasBin: true glob@11.1.0: resolution: {integrity: sha512-vuNwKSaKiqm7g0THUBu2x7ckSs3XJLXE+2ssL7/MfTGPLLcrJQ/4Uq1CjPTtO5cCIiRxqvN6Twy1qOwhL0Xjcw==} engines: {node: 20 || >=22} + deprecated: Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me hasBin: true glob@13.0.0: @@ -13722,6 +13864,10 @@ packages: resolution: {integrity: sha512-FUH39xp3SBPnxWvd5iib1X8XY7J0K0X7d93sie9CJg2PO8/7gmg89Nve6OjItK53/MlAushNNxteBYfM6DEuoA==} engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + sharp@0.34.5: + resolution: {integrity: sha512-Ou9I5Ft9WNcCbXrU9cMgPBcCK8LiwLqcbywW3t4oDV37n1pzpuNLsYiAV8eODnjbtQlSDwZ2cUEeQz4E54Hltg==} + engines: {node: ^18.17.0 || ^20.3.0 || >=21.0.0} + shebang-command@2.0.0: resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} engines: {node: '>=8'} @@ -16519,6 +16665,11 @@ snapshots: tslib: 2.8.1 optional: true + '@emnapi/runtime@1.8.1': + dependencies: + tslib: 2.8.1 + optional: true + '@emnapi/wasi-threads@1.1.0': dependencies: tslib: 2.8.1 @@ -17022,87 +17173,181 @@ snapshots: '@img/sharp-libvips-darwin-arm64': 1.2.3 optional: true + '@img/sharp-darwin-arm64@0.34.5': + optionalDependencies: + '@img/sharp-libvips-darwin-arm64': 1.2.4 + optional: true + '@img/sharp-darwin-x64@0.34.4': optionalDependencies: '@img/sharp-libvips-darwin-x64': 1.2.3 optional: true + '@img/sharp-darwin-x64@0.34.5': + optionalDependencies: + '@img/sharp-libvips-darwin-x64': 1.2.4 + optional: true + '@img/sharp-libvips-darwin-arm64@1.2.3': optional: true + '@img/sharp-libvips-darwin-arm64@1.2.4': + optional: true + 
'@img/sharp-libvips-darwin-x64@1.2.3': optional: true + '@img/sharp-libvips-darwin-x64@1.2.4': + optional: true + '@img/sharp-libvips-linux-arm64@1.2.3': optional: true + '@img/sharp-libvips-linux-arm64@1.2.4': + optional: true + '@img/sharp-libvips-linux-arm@1.2.3': optional: true + '@img/sharp-libvips-linux-arm@1.2.4': + optional: true + '@img/sharp-libvips-linux-ppc64@1.2.3': optional: true + '@img/sharp-libvips-linux-ppc64@1.2.4': + optional: true + + '@img/sharp-libvips-linux-riscv64@1.2.4': + optional: true + '@img/sharp-libvips-linux-s390x@1.2.3': optional: true + '@img/sharp-libvips-linux-s390x@1.2.4': + optional: true + '@img/sharp-libvips-linux-x64@1.2.3': optional: true + '@img/sharp-libvips-linux-x64@1.2.4': + optional: true + '@img/sharp-libvips-linuxmusl-arm64@1.2.3': optional: true + '@img/sharp-libvips-linuxmusl-arm64@1.2.4': + optional: true + '@img/sharp-libvips-linuxmusl-x64@1.2.3': optional: true + '@img/sharp-libvips-linuxmusl-x64@1.2.4': + optional: true + '@img/sharp-linux-arm64@0.34.4': optionalDependencies: '@img/sharp-libvips-linux-arm64': 1.2.3 optional: true + '@img/sharp-linux-arm64@0.34.5': + optionalDependencies: + '@img/sharp-libvips-linux-arm64': 1.2.4 + optional: true + '@img/sharp-linux-arm@0.34.4': optionalDependencies: '@img/sharp-libvips-linux-arm': 1.2.3 optional: true + '@img/sharp-linux-arm@0.34.5': + optionalDependencies: + '@img/sharp-libvips-linux-arm': 1.2.4 + optional: true + '@img/sharp-linux-ppc64@0.34.4': optionalDependencies: '@img/sharp-libvips-linux-ppc64': 1.2.3 optional: true + '@img/sharp-linux-ppc64@0.34.5': + optionalDependencies: + '@img/sharp-libvips-linux-ppc64': 1.2.4 + optional: true + + '@img/sharp-linux-riscv64@0.34.5': + optionalDependencies: + '@img/sharp-libvips-linux-riscv64': 1.2.4 + optional: true + '@img/sharp-linux-s390x@0.34.4': optionalDependencies: '@img/sharp-libvips-linux-s390x': 1.2.3 optional: true + '@img/sharp-linux-s390x@0.34.5': + optionalDependencies: + 
'@img/sharp-libvips-linux-s390x': 1.2.4 + optional: true + '@img/sharp-linux-x64@0.34.4': optionalDependencies: '@img/sharp-libvips-linux-x64': 1.2.3 optional: true + '@img/sharp-linux-x64@0.34.5': + optionalDependencies: + '@img/sharp-libvips-linux-x64': 1.2.4 + optional: true + '@img/sharp-linuxmusl-arm64@0.34.4': optionalDependencies: '@img/sharp-libvips-linuxmusl-arm64': 1.2.3 optional: true + '@img/sharp-linuxmusl-arm64@0.34.5': + optionalDependencies: + '@img/sharp-libvips-linuxmusl-arm64': 1.2.4 + optional: true + '@img/sharp-linuxmusl-x64@0.34.4': optionalDependencies: '@img/sharp-libvips-linuxmusl-x64': 1.2.3 optional: true + '@img/sharp-linuxmusl-x64@0.34.5': + optionalDependencies: + '@img/sharp-libvips-linuxmusl-x64': 1.2.4 + optional: true + '@img/sharp-wasm32@0.34.4': dependencies: '@emnapi/runtime': 1.5.0 optional: true + '@img/sharp-wasm32@0.34.5': + dependencies: + '@emnapi/runtime': 1.8.1 + optional: true + '@img/sharp-win32-arm64@0.34.4': optional: true + '@img/sharp-win32-arm64@0.34.5': + optional: true + '@img/sharp-win32-ia32@0.34.4': optional: true + '@img/sharp-win32-ia32@0.34.5': + optional: true + '@img/sharp-win32-x64@0.34.4': optional: true + '@img/sharp-win32-x64@0.34.5': + optional: true + '@inquirer/ansi@1.0.2': {} '@inquirer/checkbox@4.3.2(@types/node@22.19.0)': @@ -23455,6 +23700,8 @@ snapshots: baseline-browser-mapping@2.9.18: {} + baseline-browser-mapping@2.9.19: {} + bcp-47-match@2.0.3: {} bcp-47-normalize@2.3.0: @@ -27536,7 +27783,7 @@ snapshots: dependencies: '@next/env': 16.1.6 '@swc/helpers': 0.5.15 - baseline-browser-mapping: 2.9.18 + baseline-browser-mapping: 2.9.19 caniuse-lite: 1.0.30001766 postcss: 8.4.31 react: 19.2.4 @@ -27552,7 +27799,7 @@ snapshots: '@next/swc-win32-arm64-msvc': 16.1.6 '@next/swc-win32-x64-msvc': 16.1.6 '@opentelemetry/api': 1.9.0 - sharp: 0.34.4 + sharp: 0.34.5 transitivePeerDependencies: - '@babel/core' - babel-plugin-macros @@ -30015,6 +30262,38 @@ snapshots: '@img/sharp-win32-x64': 0.34.4 
optional: true + sharp@0.34.5: + dependencies: + '@img/colour': 1.0.0 + detect-libc: 2.1.2 + semver: 7.7.3 + optionalDependencies: + '@img/sharp-darwin-arm64': 0.34.5 + '@img/sharp-darwin-x64': 0.34.5 + '@img/sharp-libvips-darwin-arm64': 1.2.4 + '@img/sharp-libvips-darwin-x64': 1.2.4 + '@img/sharp-libvips-linux-arm': 1.2.4 + '@img/sharp-libvips-linux-arm64': 1.2.4 + '@img/sharp-libvips-linux-ppc64': 1.2.4 + '@img/sharp-libvips-linux-riscv64': 1.2.4 + '@img/sharp-libvips-linux-s390x': 1.2.4 + '@img/sharp-libvips-linux-x64': 1.2.4 + '@img/sharp-libvips-linuxmusl-arm64': 1.2.4 + '@img/sharp-libvips-linuxmusl-x64': 1.2.4 + '@img/sharp-linux-arm': 0.34.5 + '@img/sharp-linux-arm64': 0.34.5 + '@img/sharp-linux-ppc64': 0.34.5 + '@img/sharp-linux-riscv64': 0.34.5 + '@img/sharp-linux-s390x': 0.34.5 + '@img/sharp-linux-x64': 0.34.5 + '@img/sharp-linuxmusl-arm64': 0.34.5 + '@img/sharp-linuxmusl-x64': 0.34.5 + '@img/sharp-wasm32': 0.34.5 + '@img/sharp-win32-arm64': 0.34.5 + '@img/sharp-win32-ia32': 0.34.5 + '@img/sharp-win32-x64': 0.34.5 + optional: true + shebang-command@2.0.0: dependencies: shebang-regex: 3.0.0