diff --git a/.vscode/settings.json b/.vscode/settings.json index 65b2c69ed8..9147ed42a9 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -2,14 +2,27 @@ "editor.defaultFormatter": "esbenp.prettier-vscode", "cSpell.words": [ "cfetch", + "clipboardy", + "cloudflared", "esbuild", "estree", "execa", + "extensionless", "iarna", "keyvalue", "middlewares", "Miniflare", + "outdir", "outfile", - "Positionals" - ] + "pgrep", + "PKCE", + "Positionals", + "undici", + "wasmvalue", + "weakmap", + "weakset", + "webassemblymemory", + "websockets" + ], + "cSpell.ignoreWords": ["yxxx"] } diff --git a/packages/wrangler/pages/functions/filepath-routing.ts b/packages/wrangler/pages/functions/filepath-routing.ts index 401893fc5a..66f883938f 100644 --- a/packages/wrangler/pages/functions/filepath-routing.ts +++ b/packages/wrangler/pages/functions/filepath-routing.ts @@ -52,7 +52,7 @@ export async function generateConfigFromFileTree({ const declaration = node.declaration; // `export async function onRequest() {...}` - if (declaration.type === "FunctionDeclaration") { + if (declaration.type === "FunctionDeclaration" && declaration.id) { exportNames.push(declaration.id.name); } @@ -155,12 +155,10 @@ export async function generateConfigFromFileTree({ // more specific routes aren't occluded from matching due to // less specific routes appearing first in the route list. export function compareRoutes(a: string, b: string) { - function parseRoutePath(routePath: string) { - let [method, segmentedPath] = routePath.split(" "); - if (!segmentedPath) { - segmentedPath = method; - method = null; - } + function parseRoutePath(routePath: string): [string | null, string[]] { + const parts = routePath.split(" ", 2); + const segmentedPath = parts.pop() ?? ""; + const method = parts.pop() ?? 
null; const segments = segmentedPath.slice(1).split("/").filter(Boolean); return [method, segments]; @@ -205,7 +203,9 @@ async function forEachFile( const returnValues: T[] = []; while (searchPaths.length) { - const cwd = searchPaths.shift(); + // The `searchPaths.length` check above ensures that `searchPaths.shift()` is defined + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const cwd = searchPaths.shift()!; const dir = await fs.readdir(cwd, { withFileTypes: true }); for (const entry of dir) { const pathname = path.join(cwd, entry.name); diff --git a/packages/wrangler/pages/functions/routes.ts b/packages/wrangler/pages/functions/routes.ts index 4f535a89c1..e10c9fad73 100755 --- a/packages/wrangler/pages/functions/routes.ts +++ b/packages/wrangler/pages/functions/routes.ts @@ -114,7 +114,7 @@ export function parseConfig(config: Config, baseDir: string) { }); } - for (const [route, props] of Object.entries(config.routes)) { + for (const [route, props] of Object.entries(config.routes ?? 
{})) { let [_methods, routePath] = route.split(" "); if (!routePath) { routePath = _methods; diff --git a/packages/wrangler/pages/functions/template-worker.ts b/packages/wrangler/pages/functions/template-worker.ts index c462eb16da..f5d6a46b95 100644 --- a/packages/wrangler/pages/functions/template-worker.ts +++ b/packages/wrangler/pages/functions/template-worker.ts @@ -33,17 +33,16 @@ declare const routes: RouteHandler[]; declare const __FALLBACK_SERVICE__: string; // expect an ASSETS fetcher binding pointing to the asset-server stage -type Env = { - [name: string]: unknown; - ASSETS: { fetch(url: string, init: RequestInit): Promise }; +type FetchEnv = { + [name: string]: { fetch: typeof fetch }; + ASSETS: { fetch: typeof fetch }; }; type WorkerContext = { waitUntil: (promise: Promise) => void; }; -// eslint-disable-next-line @typescript-eslint/no-unused-vars -- `env` can be used by __FALLBACK_SERVICE_FETCH__ -function* executeRequest(request: Request, env: Env) { +function* executeRequest(request: Request, _env: FetchEnv) { const requestPath = new URL(request.url).pathname; // First, iterate through the routes (backwards) and execute "middlewares" on partial route matches @@ -88,20 +87,10 @@ function* executeRequest(request: Request, env: Env) { break; } } - - // Finally, yield to the fallback service (`env.ASSETS.fetch` in Pages' case) - return { - handler: () => - __FALLBACK_SERVICE__ - ? 
// @ts-expect-error expecting __FALLBACK_SERVICE__ to be the name of a service binding, so fetch should be defined - env[__FALLBACK_SERVICE__].fetch(request) - : fetch(request), - params: {} as Params, - }; } export default { - async fetch(request: Request, env: Env, workerContext: WorkerContext) { + async fetch(request: Request, env: FetchEnv, workerContext: WorkerContext) { const handlerIterator = executeRequest(request, env); const data = {}; // arbitrary data the user can set between functions const next = async (input?: RequestInfo, init?: RequestInit) => { @@ -109,14 +98,10 @@ export default { request = new Request(input, init); } - const { value } = handlerIterator.next(); - if (value) { - const { handler, params } = value; - const context: EventContext< - unknown, - string, - Record - > = { + const result = handlerIterator.next(); + if (!result.done) { + const { handler, params } = result.value; + const context = { request: new Request(request.clone()), next, params, @@ -132,6 +117,12 @@ export default { [101, 204, 205, 304].includes(response.status) ? null : response.body, response ); + } else if (__FALLBACK_SERVICE__) { + // There are no more handlers so finish with the fallback service (`env.ASSETS.fetch` in Pages' case) + return env[__FALLBACK_SERVICE__].fetch(request); + } else { + // There was not fallback service so actually make the request to the origin. 
+ return fetch(request); } }; diff --git a/packages/wrangler/src/__tests__/kv.test.ts b/packages/wrangler/src/__tests__/kv.test.ts index 88af107985..ad04f28ca5 100644 --- a/packages/wrangler/src/__tests__/kv.test.ts +++ b/packages/wrangler/src/__tests__/kv.test.ts @@ -212,7 +212,7 @@ describe("wrangler", () => { it("should make multiple requests for paginated results", async () => { // Create a lot of mock namespaces, so that the fetch requests will be paginated - const KVNamespaces = []; + const KVNamespaces: { title: string; id: string }[] = []; for (let i = 0; i < 550; i++) { KVNamespaces.push({ title: "title-" + i, id: "id-" + i }); } @@ -335,8 +335,12 @@ describe("wrangler", () => { expect(namespaceId).toEqual(expectedNamespaceId); expect(key).toEqual(expectedKey); expect(body).toEqual(expectedValue); - expect(query.get("expiration")).toEqual(`${expiration}`); - expect(query.get("expiration_ttl")).toEqual(`${expirationTtl}`); + if (expiration) { + expect(query.get("expiration")).toEqual(`${expiration}`); + } + if (expirationTtl) { + expect(query.get("expiration_ttl")).toEqual(`${expirationTtl}`); + } return null; } ); @@ -681,7 +685,7 @@ describe("wrangler", () => { if (expectedKeys.length <= keysPerRequest) { return createFetchResult(expectedKeys); } else { - const start = parseInt(query.get("cursor")) || 0; + const start = parseInt(query.get("cursor") ?? "0") || 0; const end = start + keysPerRequest; const cursor = end < expectedKeys.length ? 
end : undefined; return createFetchResult( @@ -778,7 +782,7 @@ describe("wrangler", () => { it("should make multiple requests for paginated results", async () => { // Create a lot of mock keys, so that the fetch requests will be paginated - const keys = []; + const keys: string[] = []; for (let i = 0; i < 550; i++) { keys.push("key-" + i); } diff --git a/packages/wrangler/src/__tests__/mock-cfetch.ts b/packages/wrangler/src/__tests__/mock-cfetch.ts index f0ca6c0357..9773080703 100644 --- a/packages/wrangler/src/__tests__/mock-cfetch.ts +++ b/packages/wrangler/src/__tests__/mock-cfetch.ts @@ -1,5 +1,5 @@ import type { RequestInit } from "node-fetch"; -import type { URLSearchParams } from "node:url"; +import { URLSearchParams } from "node:url"; import { pathToRegexp } from "path-to-regexp"; import { CF_API_BASE_URL } from "../cfetch"; import type { FetchResult } from "../cfetch"; @@ -9,8 +9,8 @@ import type { FetchResult } from "../cfetch"; */ export type MockHandler = ( uri: RegExpExecArray, - init?: RequestInit, - queryParams?: URLSearchParams + init: RequestInit, + queryParams: URLSearchParams ) => ResponseType; type RemoveMockFn = () => void; @@ -32,7 +32,7 @@ const mocks: MockFetch[] = []; export async function mockFetchInternal( resource: string, init: RequestInit = {}, - queryParams?: URLSearchParams + queryParams: URLSearchParams = new URLSearchParams() ) { for (const { regexp, method, handler } of mocks) { const resourcePath = new URL(resource, CF_API_BASE_URL).pathname; diff --git a/packages/wrangler/src/api/form_data.ts b/packages/wrangler/src/api/form_data.ts index 90a3fb4fad..51eebb282d 100644 --- a/packages/wrangler/src/api/form_data.ts +++ b/packages/wrangler/src/api/form_data.ts @@ -23,7 +23,7 @@ export function toMimeType(type: CfModuleType): string { } } -function toModule(module: CfModule, entryType?: CfModuleType): Blob { +function toModule(module: CfModule, entryType: CfModuleType): Blob { const { type: moduleType, content } = module; const type 
= toMimeType(moduleType ?? entryType); diff --git a/packages/wrangler/src/api/worker.ts b/packages/wrangler/src/api/worker.ts index 382cad8487..65b27bbd20 100644 --- a/packages/wrangler/src/api/worker.ts +++ b/packages/wrangler/src/api/worker.ts @@ -112,7 +112,7 @@ export interface CfWorkerInit { /** * The name of the worker. */ - name: string | void; + name: string | undefined; /** * The entrypoint module. */ @@ -120,7 +120,7 @@ export interface CfWorkerInit { /** * The list of additional modules. */ - modules: void | CfModule[]; + modules: undefined | CfModule[]; /** * All the bindings */ @@ -130,10 +130,10 @@ export interface CfWorkerInit { vars?: CfVars; services?: CfService[]; }; - migrations: void | CfDurableObjectMigrations; - compatibility_date: string | void; - compatibility_flags: void | string[]; - usage_model: void | "bundled" | "unbound"; + migrations: undefined | CfDurableObjectMigrations; + compatibility_date: string | undefined; + compatibility_flags: undefined | string[]; + usage_model: undefined | "bundled" | "unbound"; } /** diff --git a/packages/wrangler/src/cfetch/internal.ts b/packages/wrangler/src/cfetch/internal.ts index d77163e404..f7bdeae7d3 100644 --- a/packages/wrangler/src/cfetch/internal.ts +++ b/packages/wrangler/src/cfetch/internal.ts @@ -1,5 +1,5 @@ -import fetch from "node-fetch"; -import type { RequestInit, HeadersInit } from "node-fetch"; +import fetch, { Headers } from "node-fetch"; +import type { RequestInit } from "node-fetch"; import { getAPIToken, loginOrRefreshIfRequired } from "../user"; export const CF_API_BASE_URL = @@ -21,7 +21,7 @@ export async function fetchInternal( ): Promise { await requireLoggedIn(); const apiToken = requireApiToken(); - const headers = cloneHeaders(init.headers); + const headers = new Headers(init.headers); addAuthorizationHeader(headers, apiToken); const queryString = queryParams ? 
`?${queryParams.toString()}` : ""; @@ -55,11 +55,7 @@ function requireApiToken(): string { return apiToken; } -function cloneHeaders(headers: HeadersInit): HeadersInit { - return { ...headers }; -} - -function addAuthorizationHeader(headers: HeadersInit, apiToken: string): void { +function addAuthorizationHeader(headers: Headers, apiToken: string): void { if (headers["Authorization"]) { throw new Error( "The request already specifies an authorisation header - cannot add a new one." diff --git a/packages/wrangler/src/config.ts b/packages/wrangler/src/config.ts index 6992fba7c7..794b54b9e9 100644 --- a/packages/wrangler/src/config.ts +++ b/packages/wrangler/src/config.ts @@ -127,5 +127,5 @@ export type Config = { usage_model?: UsageModel; // inherited // top level build?: Build; - env?: { [envName: string]: void | Env }; + env?: { [envName: string]: undefined | Env }; }; diff --git a/packages/wrangler/src/dev.tsx b/packages/wrangler/src/dev.tsx index 4aa5329d4b..1334fea74a 100644 --- a/packages/wrangler/src/dev.tsx +++ b/packages/wrangler/src/dev.tsx @@ -26,23 +26,23 @@ import { usePreviewServer } from "./proxy"; import { execa } from "execa"; import { watch } from "chokidar"; -type CfScriptFormat = void | "modules" | "service-worker"; +type CfScriptFormat = undefined | "modules" | "service-worker"; export type DevProps = { name?: string; entry: string; port?: number; format: CfScriptFormat; - accountId: void | string; + accountId: undefined | string; initialMode: "local" | "remote"; - jsxFactory: void | string; - jsxFragment: void | string; + jsxFactory: undefined | string; + jsxFragment: undefined | string; bindings: CfWorkerInit["bindings"]; public: undefined | string; - site: void | string; - compatibilityDate: void | string; - compatibilityFlags: void | string[]; - usageModel: void | "bundled" | "unbound"; + site: undefined | string; + compatibilityDate: undefined | string; + compatibilityFlags: undefined | string[]; + usageModel: undefined | "bundled" | 
"unbound"; buildCommand: { command?: undefined | string; cwd?: undefined | string; @@ -56,7 +56,7 @@ function Dev(props: DevProps): JSX.Element { "You cannot use the service worker format with a `public` directory." ); } - const port = props.port || 8787; + const port = props.port ?? 8787; const apiToken = getAPIToken(); const directory = useTmpDir(); @@ -98,7 +98,7 @@ function Dev(props: DevProps): JSX.Element { bindings={props.bindings} site={props.site} public={props.public} - port={props.port} + port={port} /> ) : ( { + local.current.stdout?.on("data", (data: Buffer) => { console.log(`${data.toString()}`); }); - local.current.stderr.on("data", (data: Buffer) => { + local.current.stderr?.on("data", (data: Buffer) => { console.error(`${data.toString()}`); const matches = /Debugger listening on (ws:\/\/127\.0\.0\.1:9229\/[A-Za-z0-9-]+)/.exec( @@ -320,7 +320,7 @@ function useLocalWorker(props: { return { inspectorUrl }; } -function useTmpDir(): string | void { +function useTmpDir(): string | undefined { const [directory, setDirectory] = useState(); const handleError = useErrorHandler(); useEffect(() => { @@ -358,10 +358,10 @@ function useCustomBuild( cwd?: undefined | string; watch_dir?: undefined | string; } -): void | string { - const [entry, setEntry] = useState( +): undefined | string { + const [entry, setEntry] = useState( // if there's no build command, just return the expected entry - props.command ? null : expectedEntry + props.command ? 
undefined : expectedEntry ); const { command, cwd, watch_dir } = props; useEffect(() => { @@ -430,12 +430,12 @@ type EsbuildBundle = { }; function useEsbuild(props: { - entry: void | string; - destination: string | void; - staticRoot: void | string; - jsxFactory: string | void; - jsxFragment: string | void; -}): EsbuildBundle | void { + entry: undefined | string; + destination: string | undefined; + staticRoot: undefined | string; + jsxFactory: string | undefined; + jsxFragment: string | undefined; +}): EsbuildBundle | undefined { const { entry, destination, staticRoot, jsxFactory, jsxFragment } = props; const [bundle, setBundle] = useState(); useEffect(() => { @@ -465,25 +465,35 @@ function useEsbuild(props: { else { // nothing really changes here, so let's increment the id // to change the return object's identity - setBundle((previousBundle) => ({ - ...previousBundle, - id: previousBundle.id + 1, - })); + setBundle((previousBundle) => { + if (previousBundle === undefined) { + throw new Error( + "Rebuild triggered with no previous build available" + ); + } + return { ...previousBundle, id: previousBundle.id + 1 }; + }); } }, }, }); - const chunks = Object.entries(result.metafile.outputs).find( + // result.metafile is defined because of the `metafile: true` option above. + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const outputEntry = Object.entries(result.metafile!.outputs).find( ([_path, { entryPoint }]) => entryPoint === entry ); // assumedly only one entry point - + if (outputEntry === undefined) { + throw new Error( + `Cannot find entry-point "${entry}" in generated bundle.` + ); + } setBundle({ id: 0, entry, - path: chunks[0], - type: chunks[1].exports.length > 0 ? "esm" : "commonjs", - exports: chunks[1].exports, + path: outputEntry[0], + type: outputEntry[1].exports.length > 0 ? 
"esm" : "commonjs", + exports: outputEntry[1].exports, modules: moduleCollector.modules, }); } @@ -493,26 +503,24 @@ function useEsbuild(props: { // on build errors anyway // so this is a no-op error handler }); - return () => { - result?.stop(); - }; + return () => result.stop?.(); }, [entry, destination, staticRoot, jsxFactory, jsxFragment]); return bundle; } function useWorker(props: { - name: void | string; - bundle: EsbuildBundle | void; + name: undefined | string; + bundle: EsbuildBundle | undefined; format: CfScriptFormat; modules: CfModule[]; accountId: string; apiToken: string; bindings: CfWorkerInit["bindings"]; - sitesFolder: void | string; - port: number; - compatibilityDate: string | void; - compatibilityFlags: string[] | void; - usageModel: void | "bundled" | "unbound"; + sitesFolder: undefined | string; + port: undefined | number; + compatibilityDate: string | undefined; + compatibilityFlags: string[] | undefined; + usageModel: undefined | "bundled" | "unbound"; }): CfPreviewToken | undefined { const { name, @@ -636,7 +644,7 @@ const SLEEP_DURATION = 2000; // really need a first class api for this const hostNameRegex = /userHostname="(.*)"/g; async function findTunnelHostname() { - let hostName: string; + let hostName: string | undefined; while (!hostName) { try { const resp = await fetch("http://localhost:8789/metrics"); diff --git a/packages/wrangler/src/index.tsx b/packages/wrangler/src/index.tsx index b5b1f1af23..904ec3cf9a 100644 --- a/packages/wrangler/src/index.tsx +++ b/packages/wrangler/src/index.tsx @@ -72,10 +72,14 @@ async function readConfig(configPath?: string): Promise { "usage_model", ]; - Object.keys(config.env || {}).forEach((env) => { + const environments = config.env ?? 
{}; + Object.values(environments).forEach((env) => { + if (env === undefined) { + return; + } inheritedFields.forEach((field) => { - if (config[field] !== undefined && config.env[env][field] === undefined) { - config.env[env][field] = config[field]; // TODO: - shallow copy? + if (config[field] !== undefined && env[field] === undefined) { + env[field] = config[field]; // TODO: - shallow copy? } }); }); @@ -86,13 +90,16 @@ async function readConfig(configPath?: string): Promise { "durable_objects", "experimental_services", ]; - Object.keys(config.env || {}).forEach((env) => { + Object.entries(environments).forEach(([envName, envValue]) => { + if (envValue === undefined) { + return; + } mirroredFields.forEach((field) => { // if it exists on top level, it should exist on env definitions Object.keys(config[field] || {}).forEach((fieldKey) => { - if (!(fieldKey in config.env[env][field])) { + if (!(fieldKey in envValue[field])) { console.warn( - `In your configuration, "${field}.${fieldKey}" exists at a top level, but not on "env.${env}". This is not what you probably want, since the field "${field}" is not inherited by environments. Please add "${field}.${fieldKey}" to "env.${env}".` + `In your configuration, "${field}.${fieldKey}" exists at a top level, but not on "env.${envName}". This is not what you probably want, since the field "${field}" is not inherited by environments. Please add "${field}.${fieldKey}" to "env.${envName}".` ); } }); @@ -421,7 +428,11 @@ export async function main(argv: string[]): Promise { "👂 Start a local server for developing your worker", (yargs) => { return yargs - .positional("filename", { describe: "entry point", type: "string" }) + .positional("filename", { + describe: "entry point", + type: "string", + demandOption: true, + }) .option("name", { describe: "name of the script", type: "string", @@ -527,7 +538,8 @@ export async function main(argv: string[]): Promise { // -- snip, end -- - const envRootObj = args.env ? 
config.env[args.env] || {} : config; + const environments = config.env ?? {}; + const envRootObj = args.env ? environments[args.env] || {} : config; // TODO: this error shouldn't actually happen, // but we haven't fixed it internally yet @@ -768,6 +780,12 @@ export async function main(argv: string[]): Promise { // TODO: filter by client ip, which can be 'self' or an ip address }, async (args) => { + if (args.local) { + throw new NotImplementedError( + `local mode is not yet supported for this command` + ); + } + const config = args.config as Config; if (!(args.name || config.name)) { @@ -779,18 +797,16 @@ export async function main(argv: string[]): Promise { // -- snip, extract -- - if (!args.local) { - const loggedIn = await loginOrRefreshIfRequired(); - if (!loggedIn) { - // didn't login, let's just quit - console.log("Did not login, quitting..."); - return; - } + const loggedIn = await loginOrRefreshIfRequired(); + if (!loggedIn) { + // didn't login, let's just quit + console.log("Did not login, quitting..."); + return; + } + if (!config.account_id) { + config.account_id = await getAccountId(); if (!config.account_id) { - config.account_id = await getAccountId(); - if (!config.account_id) { - throw new Error("No account id found, quitting..."); - } + throw new Error("No account id found, quitting..."); } } @@ -1259,18 +1275,16 @@ export async function main(argv: string[]): Promise { // -- snip, extract -- - if (!args.local) { - const loggedIn = await loginOrRefreshIfRequired(); - if (!loggedIn) { - // didn't login, let's just quit - console.log("Did not login, quitting..."); - return; - } + const loggedIn = await loginOrRefreshIfRequired(); + if (!loggedIn) { + // didn't login, let's just quit + console.log("Did not login, quitting..."); + return; + } + if (!config.account_id) { + config.account_id = await getAccountId(); if (!config.account_id) { - config.account_id = await getAccountId(); - if (!config.account_id) { - throw new Error("No account id found, 
quitting..."); - } + throw new Error("No account id found, quitting..."); } } @@ -1309,18 +1323,16 @@ export async function main(argv: string[]): Promise { // -- snip, extract -- - if (!args.local) { - const loggedIn = await loginOrRefreshIfRequired(); - if (!loggedIn) { - // didn't login, let's just quit - console.log("Did not login, quitting..."); - return; - } + const loggedIn = await loginOrRefreshIfRequired(); + if (!loggedIn) { + // didn't login, let's just quit + console.log("Did not login, quitting..."); + return; + } + if (!config.account_id) { + config.account_id = await getAccountId(); if (!config.account_id) { - config.account_id = await getAccountId(); - if (!config.account_id) { - throw new Error("No account id found, quitting..."); - } + throw new Error("No account id found, quitting..."); } } @@ -1379,19 +1391,17 @@ export async function main(argv: string[]): Promise { // -- snip, extract -- - if (!args.local) { - const loggedIn = await loginOrRefreshIfRequired(); - if (!loggedIn) { - // didn't login, let's just quit - console.log("Did not login, quitting..."); - return; - } + const loggedIn = await loginOrRefreshIfRequired(); + if (!loggedIn) { + // didn't login, let's just quit + console.log("Did not login, quitting..."); + return; + } + if (!config.account_id) { + config.account_id = await getAccountId(); if (!config.account_id) { - config.account_id = await getAccountId(); - if (!config.account_id) { - throw new Error("No account id found, quitting..."); - } + throw new Error("No account id found, quitting..."); } } @@ -1438,6 +1448,7 @@ export async function main(argv: string[]): Promise { .positional("key", { type: "string", describe: "The key to write to.", + demandOption: true, }) .positional("value", { type: "string", @@ -1477,9 +1488,11 @@ export async function main(argv: string[]): Promise { }, async ({ key, ttl, expiration, ...args }) => { const namespaceId = getNamespaceId(args); + // One of `args.path` and `args.value` must be defined 
const value = args.path ? await readFile(args.path, "utf-8") - : args.value; + : // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + args.value!; const config = args.config as Config; if (args.path) { @@ -1501,11 +1514,7 @@ export async function main(argv: string[]): Promise { }); const ns = await mf.getKVNamespace(namespaceId); await ns.put(key, value, { expiration, expirationTtl: ttl }); - return; - } - // -- snip, extract -- - - if (!args.local) { + } else { const loggedIn = await loginOrRefreshIfRequired(); if (!loggedIn) { // didn't login, let's just quit @@ -1519,14 +1528,12 @@ export async function main(argv: string[]): Promise { throw new Error("No account id found, quitting..."); } } - } - // -- snip, end -- - - await putKeyValue(config.account_id, namespaceId, key, value, { - expiration, - expiration_ttl: ttl, - }); + await putKeyValue(config.account_id, namespaceId, key, value, { + expiration, + expiration_ttl: ttl, + }); + } } ) .command( @@ -1574,12 +1581,7 @@ export async function main(argv: string[]): Promise { const ns = await mf.getKVNamespace(namespaceId); const listResponse = await ns.list({ prefix }); console.log(JSON.stringify(listResponse.keys, null, " ")); // TODO: paginate, collate - return; - } - - // -- snip, extract -- - - if (!args.local) { + } else { const loggedIn = await loginOrRefreshIfRequired(); if (!loggedIn) { // didn't login, let's just quit @@ -1593,13 +1595,10 @@ export async function main(argv: string[]): Promise { throw new Error("No account id found, quitting..."); } } + console.log( + await listNamespaceKeys(config.account_id, namespaceId, prefix) + ); } - - // -- snip, end -- - - console.log( - await listNamespaceKeys(config.account_id, namespaceId, prefix) - ); } ) .command( @@ -1610,6 +1609,7 @@ export async function main(argv: string[]): Promise { .positional("key", { describe: "The key value to get.", type: "string", + demandOption: true, }) .option("binding", { type: "string", @@ -1686,6 +1686,7 @@ 
export async function main(argv: string[]): Promise { .positional("key", { describe: "The key value to delete", type: "string", + demandOption: true, }) .option("binding", { type: "string", @@ -1769,6 +1770,7 @@ export async function main(argv: string[]): Promise { .positional("filename", { describe: `The JSON file of key-value pairs to upload, in form [{"key":..., "value":...}"...]`, type: "string", + demandOption: true, }) .option("binding", { type: "string", @@ -1824,13 +1826,7 @@ export async function main(argv: string[]): Promise { expirationTtl: expiration_ttl, }); } - - return; - } - - // -- snip, extract -- - - if (!args.local) { + } else { const loggedIn = await loginOrRefreshIfRequired(); if (!loggedIn) { // didn't login, let's just quit @@ -1844,13 +1840,11 @@ export async function main(argv: string[]): Promise { throw new Error("No account id found, quitting..."); } } - } - - // -- snip, end -- - console.log( - await putBulkKeyValue(config.account_id, namespaceId, content) - ); + console.log( + await putBulkKeyValue(config.account_id, namespaceId, content) + ); + } } ) .command( @@ -1861,6 +1855,7 @@ export async function main(argv: string[]): Promise { .positional("filename", { describe: `The JSON file of key-value pairs to upload, in form ["key1", "key2", ...]`, type: "string", + demandOption: true, }) .option("binding", { type: "string", @@ -1904,13 +1899,7 @@ export async function main(argv: string[]): Promise { for (const key of parsedContent) { await ns.delete(key); } - - return; - } - - // -- snip, extract -- - - if (!args.local) { + } else { const loggedIn = await loginOrRefreshIfRequired(); if (!loggedIn) { // didn't login, let's just quit @@ -1924,13 +1913,15 @@ export async function main(argv: string[]): Promise { throw new Error("No account id found, quitting..."); } } - } - // -- snip, end -- - - console.log( - await deleteBulkKeyValue(config.account_id, namespaceId, content) - ); + console.log( + await deleteBulkKeyValue( + 
config.account_id, + namespaceId, + content + ) + ); + } } ); } diff --git a/packages/wrangler/src/inspect.ts b/packages/wrangler/src/inspect.ts index 68c9865062..d599d7c34a 100644 --- a/packages/wrangler/src/inspect.ts +++ b/packages/wrangler/src/inspect.ts @@ -60,7 +60,7 @@ export default function useInspector(props: InspectorProps) { const wsServerRef = useRef(); /** The websocket from the devtools instance. */ - const [localWebSocket, setLocalWebSocket] = useState(); + const [localWebSocket, setLocalWebSocket] = useState(); /** The websocket from the edge */ const [remoteWebSocket, setRemoteWebSocket] = useState< WebSocket | undefined @@ -78,7 +78,7 @@ export default function useInspector(props: InspectorProps) { res.end( JSON.stringify({ Browser: `wrangler/v${version}`, - // TODO: (someday): The DevTools protocol should match that of edgeworker. + // TODO: (someday): The DevTools protocol should match that of Edge Worker. // This could be exposed by the preview API. "Protocol-Version": "1.3", }) @@ -124,8 +124,8 @@ export default function useInspector(props: InspectorProps) { server: serverRef.current, clientTracking: true, }); - wsServerRef.current.on("connection", (ws: WebSocket) => { - if (wsServerRef.current.clients.size > 1) { + wsServerRef.current.on("connection", function (ws: WebSocket) { + if (this.clients.size > 1) { /** We only want to have one active Devtools instance at a time. */ console.error( "Tried to open a new devtools window when a previous one was already open." @@ -148,13 +148,19 @@ export default function useInspector(props: InspectorProps) { * of the component lifecycle. Convenient. 
*/ useEffect(() => { - serverRef.current.listen(props.port); - return () => { - serverRef.current.close(); - // Also disconnect any open websockets/devtools connections - wsServerRef.current.clients.forEach((ws) => ws.close()); - wsServerRef.current.close(); - }; + if (serverRef.current) { + serverRef.current.listen(props.port); + return () => { + if (serverRef.current) { + serverRef.current.close(); + } + // Also disconnect any open websockets/devtools connections + if (wsServerRef.current) { + wsServerRef.current.clients.forEach((ws) => ws.close()); + wsServerRef.current.close(); + } + }; + } }, [props.port]); /** @@ -238,7 +244,7 @@ export default function useInspector(props: InspectorProps) { "🚨", // cheesy, but it works // maybe we could use color here too. params.exceptionDetails.text, - params.exceptionDetails.exception.description + params.exceptionDetails.exception?.description ?? "" ); } if (evt.method === "Runtime.consoleAPICalled") { @@ -284,7 +290,7 @@ export default function useInspector(props: InspectorProps) { clearInterval(keepAliveInterval); // Then we'll send a message to the devtools instance to // tell it to clear the console. 
- wsServerRef.current.clients.forEach((client) => { + wsServerRef.current?.clients.forEach((client) => { // We could've used `localSocket` here, but // then we would have had to add it to the effect // change detection array, which would have made a @@ -350,7 +356,7 @@ export default function useInspector(props: InspectorProps) { /** Send a message from the local websocket to the remote websocket */ function sendMessageToRemoteWebSocket(event: MessageEvent) { try { - remoteWebSocket.send(event.data); + remoteWebSocket?.send(event.data); } catch (e) { if (e.message !== "WebSocket is not open: readyState 0 (CONNECTING)") { /** @@ -367,7 +373,7 @@ export default function useInspector(props: InspectorProps) { /** Send a message from the local websocket to the remote websocket */ function sendMessageToLocalWebSocket(event: MessageEvent) { - localWebSocket.send(event.data); + localWebSocket?.send(event.data); } if (localWebSocket && remoteWebSocket) { @@ -420,7 +426,7 @@ function randomId(): string { * directly in the terminal. */ function logConsoleMessage(evt: Protocol.Runtime.ConsoleAPICalledEvent): void { - const args = []; + const args: string[] = []; for (const ro of evt.args) { switch (ro.type) { case "string": @@ -432,13 +438,13 @@ function logConsoleMessage(evt: Protocol.Runtime.ConsoleAPICalledEvent): void { args.push(ro.value); break; case "function": - args.push(`[Function: ${ro.description}]`); + args.push(`[Function: ${ro.description ?? ""}]`); break; case "object": if (!ro.preview) { - args.push(ro.description); + args.push(ro.description ?? ""); } else { - args.push(ro.preview.description); + args.push(ro.preview.description ?? 
""); switch (ro.preview.subtype) { case "array": @@ -458,8 +464,10 @@ function logConsoleMessage(evt: Protocol.Runtime.ConsoleAPICalledEvent): void { args.push( "{\n" + ro.preview.entries - .map(({ key, value }) => { - return ` ${key.description} => ${value.description}`; + ?.map(({ key, value }) => { + return ` ${key?.description ?? ""} => ${ + value.description + }`; }) .join(",\n") + (ro.preview.overflow ? "\n ..." : "") + @@ -471,7 +479,7 @@ function logConsoleMessage(evt: Protocol.Runtime.ConsoleAPICalledEvent): void { args.push( "{ " + ro.preview.entries - .map(({ value }) => { + ?.map(({ value }) => { return `${value.description}`; }) .join(", ") + diff --git a/packages/wrangler/src/kv.tsx b/packages/wrangler/src/kv.tsx index 8b84033baf..edd28be1ef 100644 --- a/packages/wrangler/src/kv.tsx +++ b/packages/wrangler/src/kv.tsx @@ -82,7 +82,7 @@ export interface NamespaceKeyInfo { export async function listNamespaceKeys( accountId: string, namespaceId: string, - prefix?: string + prefix = "" ) { return await fetchListResult( `/accounts/${accountId}/storage/kv/namespaces/${namespaceId}/keys`, @@ -98,15 +98,20 @@ export async function putKeyValue( value: string, args?: { expiration?: number; expiration_ttl?: number } ) { + let searchParams: URLSearchParams | undefined; + if (args) { + searchParams = new URLSearchParams(); + if (args.expiration) { + searchParams.set("expiration", `${args.expiration}`); + } + if (args.expiration_ttl) { + searchParams.set("expiration_ttl", `${args.expiration_ttl}`); + } + } return await fetchResult( `/accounts/${accountId}/storage/kv/namespaces/${namespaceId}/values/${key}`, { method: "PUT", body: value }, - args - ? new URLSearchParams({ - expiration: args.expiration?.toString(), - expiration_ttl: args.expiration_ttl?.toString(), - }) - : undefined + searchParams ); } @@ -262,6 +267,10 @@ export function getNamespaceId({ /** * KV namespace binding names must be valid JS identifiers. 
*/ -export function isValidNamespaceBinding(binding: string): boolean { - return /^[a-zA-Z_][a-zA-Z0-9_]*$/.test(binding); +export function isValidNamespaceBinding( + binding: string | undefined +): binding is string { + return ( + typeof binding === "string" && /^[a-zA-Z_][a-zA-Z0-9_]*$/.test(binding) + ); } diff --git a/packages/wrangler/src/pages.tsx b/packages/wrangler/src/pages.tsx index f257ed6de8..2d128ddb52 100644 --- a/packages/wrangler/src/pages.tsx +++ b/packages/wrangler/src/pages.tsx @@ -1,6 +1,5 @@ /* eslint-disable no-shadow */ -import assert from "assert"; import type { BuilderCallback } from "yargs"; import { join } from "path"; import { tmpdir } from "os"; @@ -22,7 +21,7 @@ import { generateConfigFromFileTree } from "../pages/functions/filepath-routing" import type { Headers, Request, fetch } from "@miniflare/core"; import type { MiniflareOptions } from "miniflare"; -const EXIT_CALLBACKS = []; +const EXIT_CALLBACKS: (() => void)[] = []; const EXIT = (message?: string, code?: number) => { if (message) console.log(message); if (code) process.exitCode = code; @@ -122,7 +121,9 @@ async function spawnProxyProcess({ }, } ); - EXIT_CALLBACKS.push(() => proxy.kill()); + EXIT_CALLBACKS.push(() => { + proxy.kill(); + }); proxy.stdout.on("data", (data) => { console.log(`[proxy]: ${data}`); @@ -862,11 +863,13 @@ export const pages: BuilderCallback = (yargs) => { // internally just waits for that promise to resolve. await scriptReadyPromise; - // Should only be called if no proxyPort, using `assert.fail()` here - // means the type of `assetsFetch` is still `typeof fetch` - const assetsFetch = proxyPort - ? () => assert.fail() - : await generateAssetsFetch(directory); + // `assetsFetch()` will only be called if there is no `proxyPort` defined. + // We only define `proxyPort`, above, when there is no `directory` defined. + const assetsFetch = + directory !== undefined + ? 
await generateAssetsFetch(directory) + : invalidAssetsFetch; + const miniflare = new Miniflare({ port, watch: true, @@ -1029,3 +1032,9 @@ export const pages: BuilderCallback = (yargs) => { ) ); }; + +const invalidAssetsFetch: typeof fetch = () => { + throw new Error( + "Trying to fetch assets directly when there is no `directory` option specified, and not in `local` mode." + ); +}; diff --git a/packages/wrangler/src/proxy.ts b/packages/wrangler/src/proxy.ts index 5a1407bf3a..0e56ad94ce 100644 --- a/packages/wrangler/src/proxy.ts +++ b/packages/wrangler/src/proxy.ts @@ -1,5 +1,5 @@ import { connect } from "node:http2"; -import type { ServerHttp2Stream } from "node:http2"; +import type { ClientHttp2Session, ServerHttp2Stream } from "node:http2"; import { createServer } from "node:http"; import type { IncomingHttpHeaders, @@ -64,32 +64,7 @@ export function usePreviewServer({ }) { /** Creates an HTTP/1 proxy that sends requests over HTTP/2. */ const proxyServer = useRef(); - if (!proxyServer.current) { - proxyServer.current = createServer() - .on("request", function (req, res) { - // log all requests - console.log( - new Date().toLocaleTimeString(), - req.method, - req.url, - res.statusCode - ); - }) - .on("upgrade", (req) => { - // log all websocket connections - console.log( - new Date().toLocaleTimeString(), - req.method, - req.url, - 101, - "(WebSocket)" - ); - }) - .on("error", (err) => { - // log all connection errors - console.error(new Date().toLocaleTimeString(), err); - }); - } + const proxy = (proxyServer.current ??= createProxyServer()); /** * When we're not connected / getting a fresh token on changes, @@ -106,8 +81,6 @@ export function usePreviewServer({ >([]); useEffect(() => { - const proxy = proxyServer.current; - // If we don't have a token, that means either we're just starting up, // or we're refreshing the token. 
if (!previewToken) { @@ -147,23 +120,7 @@ export function usePreviewServer({ cleanupListeners.push(() => remote.destroy()); /** HTTP/2 -> HTTP/2 */ - function handleStream( - stream: ServerHttp2Stream, - headers: IncomingHttpHeaders - ) { - addCfPreviewTokenHeader(headers, previewToken.value); - headers[":authority"] = previewToken.host; - const request = stream.pipe(remote.request(headers)); - request.on("response", (responseHeaders: IncomingHttpHeaders) => { - rewriteRemoteHostToLocalHostInHeaders( - responseHeaders, - previewToken.host, - port - ); - stream.respond(responseHeaders); - request.pipe(stream, { end: true }); - }); - } + const handleStream = createStreamHandler(previewToken, remote, port); proxy.on("stream", handleStream); cleanupListeners.push(() => proxy.off("stream", handleStream)); @@ -194,7 +151,7 @@ export function usePreviewServer({ } const request = message.pipe(remote.request(headers)); request.on("response", (responseHeaders) => { - const status = responseHeaders[":status"]; + const status = responseHeaders[":status"] ?? 500; rewriteRemoteHostToLocalHostInHeaders( responseHeaders, previewToken.host, @@ -254,18 +211,18 @@ export function usePreviewServer({ return () => { cleanupListeners.forEach((d) => d()); }; - }, [previewToken, publicRoot, port]); + }, [previewToken, publicRoot, port, proxy]); // Start/stop the server whenever the // containing component is mounted/unmounted. 
useEffect(() => { - proxyServer.current.listen(port); + proxy.listen(port); console.log(`⬣ Listening at http://localhost:${port}`); return () => { - proxyServer.current.close(); + proxy.close(); }; - }, [port]); + }, [port, proxy]); } function createHandleAssetsRequest( @@ -292,3 +249,54 @@ const HTTP1_HEADERS = new Set([ "transfer-encoding", "http2-settings", ]); + +function createProxyServer() { + return createServer() + .on("request", function (req, res) { + // log all requests + console.log( + new Date().toLocaleTimeString(), + req.method, + req.url, + res.statusCode + ); + }) + .on("upgrade", (req) => { + // log all websocket connections + console.log( + new Date().toLocaleTimeString(), + req.method, + req.url, + 101, + "(WebSocket)" + ); + }) + .on("error", (err) => { + // log all connection errors + console.error(new Date().toLocaleTimeString(), err); + }); +} + +function createStreamHandler( + previewToken: CfPreviewToken, + remote: ClientHttp2Session, + port: number +) { + return function handleStream( + stream: ServerHttp2Stream, + headers: IncomingHttpHeaders + ) { + addCfPreviewTokenHeader(headers, previewToken.value); + headers[":authority"] = previewToken.host; + const request = stream.pipe(remote.request(headers)); + request.on("response", (responseHeaders: IncomingHttpHeaders) => { + rewriteRemoteHostToLocalHostInHeaders( + responseHeaders, + previewToken.host, + port + ); + stream.respond(responseHeaders); + request.pipe(stream, { end: true }); + }); + }; +} diff --git a/packages/wrangler/src/publish.ts b/packages/wrangler/src/publish.ts index bbb6ef3dfa..16efeb5d1f 100644 --- a/packages/wrangler/src/publish.ts +++ b/packages/wrangler/src/publish.ts @@ -11,7 +11,7 @@ import type { Config } from "./config"; import makeModuleCollector from "./module-collection"; import { syncAssets } from "./sites"; -type CfScriptFormat = void | "modules" | "service-worker"; +type CfScriptFormat = undefined | "modules" | "service-worker"; type Props = { config: 
Config; @@ -26,8 +26,8 @@ type Props = { triggers?: (string | number)[]; routes?: (string | number)[]; legacyEnv?: boolean; - jsxFactory: void | string; - jsxFragment: void | string; + jsxFactory: undefined | string; + jsxFragment: undefined | string; }; function sleep(ms: number) { @@ -50,13 +50,18 @@ export default async function publish(props: Props): Promise { __path__, } = config; - const envRootObj = props.env ? config.env[props.env] || {} : config; + const envRootObj = + props.env && config.env ? config.env[props.env] || {} : config; assert( envRootObj.compatibility_date || props["compatibility-date"], "A compatibility_date is required when publishing. Add one to your wrangler.toml file, or pass it in your terminal as --compatibility_date. See https://developers.cloudflare.com/workers/platform/compatibility-dates for more information." ); + if (accountId === undefined) { + throw new Error("No account_id provided."); + } + const triggers = props.triggers || config.triggers?.crons; const routes = props.routes || config.routes; @@ -129,18 +134,23 @@ export default async function publish(props: Props): Promise { ...(jsxFragment && { jsxFragment }), }); - const chunks = Object.entries(result.metafile.outputs).find( - ([_path, { entryPoint }]) => - entryPoint === - (props.public - ? path.join(path.dirname(file), "static-asset-facade.js") - : file) + const expectedEntryPoint = props.public + ? path.join(path.dirname(file), "static-asset-facade.js") + : file; + // result.metafile is defined because of the `metafile: true` option above. + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const outputEntry = Object.entries(result.metafile!.outputs).find( + ([, { entryPoint }]) => entryPoint === expectedEntryPoint ); - + if (outputEntry === undefined) { + throw new Error( + `Cannot find entry-point "${expectedEntryPoint}" in generated bundle.` + ); + } const { format } = props; const bundle = { - type: chunks[1].exports.length > 0 ? 
"esm" : "commonjs", - exports: chunks[1].exports, + type: outputEntry[1].exports.length > 0 ? "esm" : "commonjs", + exports: outputEntry[1].exports, }; // TODO: instead of bundling the facade with the worker, we should just bundle the worker and expose it as a module. @@ -157,13 +167,13 @@ export default async function publish(props: Props): Promise { return; } - const content = await readFile(chunks[0], { encoding: "utf-8" }); + const content = await readFile(outputEntry[0], { encoding: "utf-8" }); await destination.cleanup(); // if config.migrations // get current migration tag let migrations; - if ("migrations" in config) { + if ("migrations" in config && config.migrations !== undefined) { const scripts = await fetchResult<{ id: string; migration_tag: string }[]>( `/accounts/${accountId}/workers/scripts` ); @@ -199,15 +209,10 @@ export default async function publish(props: Props): Promise { } } - const assets = - props.public || props.site || props.config.site?.bucket // TODO: allow both - ? await syncAssets( - accountId, - scriptName, - props.public || props.site || props.config.site?.bucket, - false - ) - : { manifest: undefined, namespace: undefined }; + const assetPath = props.public || props.site || props.config.site?.bucket; // TODO: allow both + const assets = assetPath + ? await syncAssets(accountId, scriptName, assetPath, false) + : { manifest: undefined, namespace: undefined }; const bindings: CfWorkerInit["bindings"] = { kv_namespaces: envRootObj.kv_namespaces?.concat( @@ -223,7 +228,7 @@ export default async function publish(props: Props): Promise { const worker: CfWorkerInit = { name: scriptName, main: { - name: path.basename(chunks[0]), + name: path.basename(outputEntry[0]), content: content, type: bundle.type === "esm" ? 
"esm" : "commonjs", }, diff --git a/packages/wrangler/src/sites.tsx b/packages/wrangler/src/sites.tsx index 1d9a00d487..1fbb153600 100644 --- a/packages/wrangler/src/sites.tsx +++ b/packages/wrangler/src/sites.tsx @@ -70,6 +70,12 @@ async function createKVNamespaceIfNotAlreadyExisting( }; } +interface UploadEntry { + key: string; + value: string; + base64: boolean; +} + export async function syncAssets( accountId: string, scriptName: string, @@ -89,7 +95,7 @@ export async function syncAssets( ); const manifest = {}; - const upload = []; + const upload: UploadEntry[] = []; // TODO: this can be more efficient by parallelising for await (const file of getFilesInFolder(dirPath)) { // TODO: "exclude:" config diff --git a/packages/wrangler/src/user.tsx b/packages/wrangler/src/user.tsx index e66d37c7c3..1d9b01fd24 100644 --- a/packages/wrangler/src/user.tsx +++ b/packages/wrangler/src/user.tsx @@ -298,9 +298,9 @@ let initialised = false; // we do this because we have some async stuff // TODO: this should just happen in the top level -// abd we should fiure out how to do top level await +// and we should figure out how to do top level await export async function initialise(): Promise { - // get refreshtoken/accesstoken from fs if exists + // get refreshToken/accessToken from fs if exists try { // if CF_API_TOKEN available, use that if (process.env.CF_API_TOKEN) { @@ -350,7 +350,9 @@ export function getAPIToken(): string { } throwIfNotInitialised(); - return LocalState.accessToken?.value; + // `throwIfNotInitialised()` ensures that the accessToken is guaranteed to be defined. 
+ // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + return LocalState.accessToken!.value; } interface AccessContext { @@ -971,14 +973,14 @@ export async function getAccountId() { }); } catch (err) { // probably offline + return; } if (!response) return; - let accountId: string; - // @ts-expect-error need to type this response - const responseJSON: { + let accountId: string | undefined; + const responseJSON = (await response.json()) as { success: boolean; result: { id: string; account: { id: string; name: string } }[]; - } = await response.json(); + }; if (responseJSON.success === true) { if (responseJSON.result.length === 1) { diff --git a/tsconfig.json b/tsconfig.json index 939fdad333..65e40b047c 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -10,7 +10,8 @@ "noEmit": true, "lib": ["esnext"], "jsx": "react", - "resolveJsonModule": true + "resolveJsonModule": true, + "strictNullChecks": true }, "exclude": ["node_modules/", "packages/wrangler/vendor"] }